Index: CHANGES.txt
===================================================================
--- CHANGES.txt (revision 921633)
+++ CHANGES.txt (working copy)
@@ -83,6 +83,15 @@
with FuzzyQuery to ensure that exact matches are always scored higher,
because only the boost will be used in scoring. (Robert Muir)
+* LUCENE-2294: IndexWriter constructors have been deprecated in favor of a
+  single constructor that accepts an IndexWriterConfig and a Directory. All
+  IndexWriter-related parameters are now set on IndexWriterConfig, and the
+  corresponding setter/getter methods on IndexWriter were deprecated as well.
+  Call writer.getConfig().getXYZ() to query for a parameter XYZ.
+  Additionally, the setters/getters related to MergePolicy were deprecated;
+  interact with the MergePolicy directly instead.
+  (Shai Erera via Mike McCandless)
+
Bug fixes
* LUCENE-2119: Don't throw NegativeArraySizeException if you pass
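A minimal sketch of the migration described in the LUCENE-2294 entry above;
dir is a placeholder Directory, and Version.LUCENE_31 stands in for whatever
Version the application targets:

    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class NewCtorSketch {
      public static void main(String[] args) throws Exception {
        Directory dir = new RAMDirectory();
        // Deprecated form:
        //   new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED)
        // Replacement: a single constructor taking the Directory and a config
        // object that carries the analyzer and all other writer parameters.
        IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
            Version.LUCENE_31, new WhitespaceAnalyzer(Version.LUCENE_31)));
        writer.close();
      }
    }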
Index: contrib/analyzers/common/src/test/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzerTest.java
===================================================================
--- contrib/analyzers/common/src/test/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzerTest.java (revision 921633)
+++ contrib/analyzers/common/src/test/org/apache/lucene/analysis/query/QueryAutoStopWordAnalyzerTest.java (working copy)
@@ -31,6 +31,7 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
@@ -51,7 +52,7 @@
super.setUp();
dir = new RAMDirectory();
appAnalyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
- IndexWriter writer = new IndexWriter(dir, appAnalyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, appAnalyzer));
int numDocs = 200;
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
Index: contrib/analyzers/common/src/test/org/apache/lucene/analysis/shingle/ShingleAnalyzerWrapperTest.java
===================================================================
--- contrib/analyzers/common/src/test/org/apache/lucene/analysis/shingle/ShingleAnalyzerWrapperTest.java (revision 921633)
+++ contrib/analyzers/common/src/test/org/apache/lucene/analysis/shingle/ShingleAnalyzerWrapperTest.java (working copy)
@@ -31,6 +31,7 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanClause;
@@ -59,7 +60,7 @@
*/
public IndexSearcher setUpSearcher(Analyzer analyzer) throws Exception {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
Document doc;
doc = new Document();
Index: contrib/ant/src/java/org/apache/lucene/ant/IndexTask.java
===================================================================
--- contrib/ant/src/java/org/apache/lucene/ant/IndexTask.java (revision 921633)
+++ contrib/ant/src/java/org/apache/lucene/ant/IndexTask.java (working copy)
@@ -38,7 +38,10 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.index.Term;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Searcher;
@@ -280,15 +283,17 @@
log("checkLastModified = " + checkLastModified, Project.MSG_VERBOSE);
- IndexWriter writer =
- new IndexWriter(dir, analyzer, create, IndexWriter.MaxFieldLength.LIMITED);
-
- writer.setUseCompoundFile(useCompoundIndex);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ Version.LUCENE_CURRENT, analyzer).setOpenMode(
+ create ? OpenMode.CREATE : OpenMode.APPEND));
+ LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
+ lmp.setUseCompoundFile(useCompoundIndex);
+ lmp.setUseCompoundDocStore(useCompoundIndex);
+ lmp.setMergeFactor(mergeFactor);
int totalFiles = 0;
int totalIndexed = 0;
int totalIgnored = 0;
try {
- writer.setMergeFactor(mergeFactor);
for (int i = 0; i < rcs.size(); i++) {
ResourceCollection rc = rcs.elementAt(i);
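The IndexTask hunk above shows the pattern for settings that moved off
IndexWriter onto its merge policy. A hedged sketch of the replacement calls,
assuming (as the cast above does) that the writer's policy is a
LogMergePolicy:

    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.LogMergePolicy;

    final class MergePolicySketch {
      // Formerly: writer.setUseCompoundFile(...), writer.setMergeFactor(...).
      static void configure(IndexWriter writer, boolean compound, int mergeFactor) {
        LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
        lmp.setUseCompoundFile(compound);      // compound index files
        lmp.setUseCompoundDocStore(compound);  // compound doc stores
        lmp.setMergeFactor(mergeFactor);       // segments merged at once
      }
    }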
Index: contrib/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java
===================================================================
--- contrib/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java (revision 921633)
+++ contrib/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java (working copy)
@@ -21,9 +21,12 @@
import org.apache.lucene.benchmark.byTask.utils.Config;
import org.apache.lucene.index.IndexDeletionPolicy;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MergeScheduler;
import org.apache.lucene.index.ConcurrentMergeScheduler;
import org.apache.lucene.index.MergePolicy;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.util.Version;
import java.io.BufferedOutputStream;
import java.io.File;
@@ -99,7 +102,7 @@
final double ramBuffer = config.get("ram.flush.mb",OpenIndexTask.DEFAULT_RAM_FLUSH_MB);
final int maxBuffered = config.get("max.buffered",OpenIndexTask.DEFAULT_MAX_BUFFERED);
- if (maxBuffered == IndexWriter.DISABLE_AUTO_FLUSH) {
+ if (maxBuffered == IndexWriterConfig.DISABLE_AUTO_FLUSH) {
writer.setRAMBufferSizeMB(ramBuffer);
writer.setMaxBufferedDocs(maxBuffered);
} else {
@@ -147,10 +150,9 @@
Config config = runData.getConfig();
IndexWriter writer = new IndexWriter(runData.getDirectory(),
- runData.getAnalyzer(),
- true,
- getIndexDeletionPolicy(config),
- IndexWriter.MaxFieldLength.LIMITED);
+ new IndexWriterConfig(Version.LUCENE_31, runData.getAnalyzer())
+ .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(
+ getIndexDeletionPolicy(config)));
setIndexWriterConfig(writer, config);
runData.setIndexWriter(writer);
return 1;
Index: contrib/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/OpenIndexTask.java
===================================================================
--- contrib/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/OpenIndexTask.java (revision 921633)
+++ contrib/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/OpenIndexTask.java (working copy)
@@ -21,7 +21,9 @@
import org.apache.lucene.benchmark.byTask.utils.Config;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogMergePolicy;
+import org.apache.lucene.util.Version;
import java.io.IOException;
@@ -39,10 +41,10 @@
*/
public class OpenIndexTask extends PerfTask {
- public static final int DEFAULT_MAX_BUFFERED = IndexWriter.DEFAULT_MAX_BUFFERED_DOCS;
- public static final int DEFAULT_MAX_FIELD_LENGTH = IndexWriter.DEFAULT_MAX_FIELD_LENGTH;
+ public static final int DEFAULT_MAX_BUFFERED = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS;
+ public static final int DEFAULT_MAX_FIELD_LENGTH = IndexWriterConfig.UNLIMITED_FIELD_LENGTH;
public static final int DEFAULT_MERGE_PFACTOR = LogMergePolicy.DEFAULT_MERGE_FACTOR;
- public static final double DEFAULT_RAM_FLUSH_MB = (int) IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB;
+ public static final double DEFAULT_RAM_FLUSH_MB = (int) IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB;
private String commitUserData;
public OpenIndexTask(PerfRunData runData) {
@@ -61,10 +63,9 @@
}
IndexWriter writer = new IndexWriter(runData.getDirectory(),
- runData.getAnalyzer(),
- CreateIndexTask.getIndexDeletionPolicy(config),
- IndexWriter.MaxFieldLength.UNLIMITED,
- ic);
+ new IndexWriterConfig(Version.LUCENE_CURRENT, runData.getAnalyzer())
+ .setIndexDeletionPolicy(CreateIndexTask.getIndexDeletionPolicy(config))
+ .setIndexCommit(ic));
CreateIndexTask.setIndexWriterConfig(writer, config);
runData.setIndexWriter(writer);
return 1;
Index: contrib/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java
===================================================================
--- contrib/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java (revision 921633)
+++ contrib/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java (working copy)
@@ -27,6 +27,7 @@
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.benchmark.byTask.feeds.DocMaker;
import org.apache.lucene.benchmark.byTask.feeds.ReutersQueryMaker;
@@ -36,12 +37,15 @@
import org.apache.lucene.collation.CollationKeyAnalyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.SerialMergeScheduler;
import org.apache.lucene.index.LogDocMergePolicy;
import org.apache.lucene.index.TermFreqVector;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.search.FieldCache.StringIndex;
import org.apache.lucene.search.FieldCache;
@@ -96,7 +100,9 @@
assertEquals("TestSearchTask was supposed to be called!",279,CountingSearchTestTask.numSearches);
assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
// now we should be able to open the index for write.
- IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND));
iw.close();
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());
@@ -182,7 +188,7 @@
assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
// now we should be able to open the index for write.
- IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
iw.close();
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
assertEquals("100 docs were added to the index, this is what we expect to find!",100,ir.numDocs());
@@ -221,7 +227,7 @@
assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
// now we should be able to open the index for write.
- IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false,IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
iw.close();
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
assertEquals("1000 docs were added to the index, this is what we expect to find!",1000,ir.numDocs());
@@ -294,7 +300,7 @@
assertEquals("TestSearchTask was supposed to be called!",139,CountingSearchTestTask.numSearches);
assertTrue("Index does not exist?...!", IndexReader.indexExists(benchmark.getRunData().getDirectory()));
// now we should be able to open the index for write.
- IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false,IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(), new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
iw.close();
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
assertEquals("1 docs were added to the index, this is what we expect to find!",1,ir.numDocs());
@@ -417,7 +423,9 @@
benchmark = execBenchmark(algLines2);
// now we should be able to open the index for write.
- IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),null,false,IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter iw = new IndexWriter(benchmark.getRunData().getDirectory(),
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND));
iw.close();
IndexReader ir = IndexReader.open(benchmark.getRunData().getDirectory(), true);
@@ -655,7 +663,9 @@
// 2. execute the algorithm (required in every "logic" test)
Benchmark benchmark = execBenchmark(algLines);
- assertTrue("did not use the specified MergeScheduler", ((MyMergeScheduler) benchmark.getRunData().getIndexWriter().getMergeScheduler()).called);
+ assertTrue("did not use the specified MergeScheduler",
+ ((MyMergeScheduler) benchmark.getRunData().getIndexWriter().getConfig()
+ .getMergeScheduler()).called);
benchmark.getRunData().getIndexWriter().close();
// 3. test number of docs in the index
@@ -743,10 +753,10 @@
// 2. execute the algorithm (required in every "logic" test)
Benchmark benchmark = execBenchmark(algLines);
final IndexWriter writer = benchmark.getRunData().getIndexWriter();
- assertEquals(2, writer.getMaxBufferedDocs());
- assertEquals(IndexWriter.DISABLE_AUTO_FLUSH, (int) writer.getRAMBufferSizeMB());
- assertEquals(3, writer.getMergeFactor());
- assertFalse(writer.getUseCompoundFile());
+ assertEquals(2, writer.getConfig().getMaxBufferedDocs());
+ assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, (int) writer.getConfig().getRAMBufferSizeMB());
+ assertEquals(3, ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor());
+ assertFalse(((LogMergePolicy) writer.getMergePolicy()).getUseCompoundFile());
writer.close();
Directory dir = benchmark.getRunData().getDirectory();
IndexReader reader = IndexReader.open(dir, true);
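The assertions above show the read side of the change: per-writer parameters
are queried through writer.getConfig(), while merge settings stay on the
policy. A short sketch using only getters that appear in this patch:

    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.LogMergePolicy;

    final class ConfigReadSketch {
      static void dump(IndexWriter writer) {
        IndexWriterConfig conf = writer.getConfig();
        int maxBuffered = conf.getMaxBufferedDocs();   // was writer.getMaxBufferedDocs()
        double ramMB = conf.getRAMBufferSizeMB();      // was writer.getRAMBufferSizeMB()
        // Merge-related getters live on the policy, not the config:
        LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
        int mergeFactor = lmp.getMergeFactor();        // was writer.getMergeFactor()
        System.out.println(maxBuffered + " " + ramMB + " " + mergeFactor);
      }
    }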
Index: contrib/fast-vector-highlighter/src/java/org/apache/lucene/search/vectorhighlight/FieldTermStack.java
===================================================================
--- contrib/fast-vector-highlighter/src/java/org/apache/lucene/search/vectorhighlight/FieldTermStack.java (revision 921633)
+++ contrib/fast-vector-highlighter/src/java/org/apache/lucene/search/vectorhighlight/FieldTermStack.java (working copy)
@@ -30,10 +30,10 @@
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.TermFreqVector;
import org.apache.lucene.index.TermPositionVector;
import org.apache.lucene.index.TermVectorOffsetInfo;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
@@ -50,13 +50,13 @@
LinkedList termList = new LinkedList();
public static void main( String[] args ) throws Exception {
- Analyzer analyzer = new WhitespaceAnalyzer();
+ Analyzer analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
QueryParser parser = new QueryParser(Version.LUCENE_CURRENT, "f", analyzer );
Query query = parser.parse( "a x:b" );
FieldQuery fieldQuery = new FieldQuery( query, true, false );
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter( dir, analyzer, MaxFieldLength.LIMITED );
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT, analyzer));
Document doc = new Document();
doc.add( new Field( "f", "a a a b b c a b b c d e f", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
doc.add( new Field( "f", "b a b a f", Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
Index: contrib/fast-vector-highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java
===================================================================
--- contrib/fast-vector-highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java (revision 921633)
+++ contrib/fast-vector-highlighter/src/test/org/apache/lucene/search/vectorhighlight/AbstractTestCase.java (working copy)
@@ -35,8 +35,9 @@
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.PhraseQuery;
@@ -326,7 +327,8 @@
// make 1 doc with multi valued field
protected void make1dmfIndex( Analyzer analyzer, String... values ) throws Exception {
- IndexWriter writer = new IndexWriter( dir, analyzer, true, MaxFieldLength.LIMITED );
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer).setOpenMode(OpenMode.CREATE));
Document doc = new Document();
for( String value: values )
doc.add( new Field( F, value, Store.YES, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
@@ -338,7 +340,8 @@
// make 1 doc with multi valued & not analyzed field
protected void make1dmfIndexNA( String... values ) throws Exception {
- IndexWriter writer = new IndexWriter( dir, analyzerK, true, MaxFieldLength.LIMITED );
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzerK).setOpenMode(OpenMode.CREATE));
Document doc = new Document();
for( String value: values )
doc.add( new Field( F, value, Store.YES, Index.NOT_ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
Index: contrib/fast-vector-highlighter/src/test/org/apache/lucene/search/vectorhighlight/SimpleFragmentsBuilderTest.java
===================================================================
--- contrib/fast-vector-highlighter/src/test/org/apache/lucene/search/vectorhighlight/SimpleFragmentsBuilderTest.java (revision 921633)
+++ contrib/fast-vector-highlighter/src/test/org/apache/lucene/search/vectorhighlight/SimpleFragmentsBuilderTest.java (working copy)
@@ -24,7 +24,8 @@
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.Query;
public class SimpleFragmentsBuilderTest extends AbstractTestCase {
@@ -118,7 +119,8 @@
}
protected void makeUnstoredIndex() throws Exception {
- IndexWriter writer = new IndexWriter( dir, analyzerW, true, MaxFieldLength.LIMITED );
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzerW).setOpenMode(OpenMode.CREATE));
Document doc = new Document();
doc.add( new Field( F, "aaa", Store.NO, Index.ANALYZED, TermVector.WITH_POSITIONS_OFFSETS ) );
writer.addDocument( doc );
Index: contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java
===================================================================
--- contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java (revision 921633)
+++ contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterPhraseTest.java (working copy)
@@ -33,9 +33,9 @@
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermPositionVector;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
@@ -59,7 +59,7 @@
final String TEXT = "the fox jumped";
final Directory directory = new RAMDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
try {
final Document document = new Document();
document.add(new Field(FIELD, new TokenStreamConcurrent(),
@@ -102,7 +102,7 @@
final String TEXT = "the fox jumped";
final Directory directory = new RAMDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
try {
final Document document = new Document();
document.add(new Field(FIELD, new TokenStreamConcurrent(),
@@ -171,7 +171,7 @@
final String TEXT = "the fox did not jump";
final Directory directory = new RAMDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
try {
final Document document = new Document();
document.add(new Field(FIELD, new TokenStreamSparse(),
@@ -213,7 +213,7 @@
final String TEXT = "the fox did not jump";
final Directory directory = new RAMDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
try {
final Document document = new Document();
document.add(new Field(FIELD, TEXT, Store.YES, Index.ANALYZED,
@@ -253,7 +253,7 @@
final String TEXT = "the fox did not jump";
final Directory directory = new RAMDirectory();
final IndexWriter indexWriter = new IndexWriter(directory,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), MaxFieldLength.UNLIMITED);
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
try {
final Document document = new Document();
document.add(new Field(FIELD, new TokenStreamSparse(),
Index: contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java
===================================================================
--- contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java (revision 921633)
+++ contrib/highlighter/src/test/org/apache/lucene/search/highlight/HighlighterTest.java (working copy)
@@ -51,8 +51,9 @@
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanQuery;
@@ -80,7 +81,6 @@
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.util.Version;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
@@ -89,8 +89,6 @@
*
*/
public class HighlighterTest extends BaseTokenStreamTestCase implements Formatter {
- // TODO: change to CURRENT, does not work because posIncr:
- static final Version TEST_VERSION = TEST_VERSION_CURRENT;
private IndexReader reader;
static final String FIELD_NAME = "contents";
@@ -99,7 +97,7 @@
RAMDirectory ramDir;
public IndexSearcher searcher = null;
int numHighlights = 0;
- final Analyzer analyzer = new StandardAnalyzer(TEST_VERSION);
+ final Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
TopDocs hits;
String[] texts = {
@@ -120,7 +118,7 @@
public void testQueryScorerHits() throws Exception {
Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
- QueryParser qp = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
+ QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
query = qp.parse("\"very long\"");
searcher = new IndexSearcher(ramDir, true);
TopDocs hits = searcher.search(query, 10);
@@ -150,7 +148,7 @@
String s1 = "I call our world Flatland, not because we call it so,";
- QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, new StandardAnalyzer(TEST_VERSION));
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new StandardAnalyzer(TEST_VERSION_CURRENT));
// Verify that a query against the default field results in text being
// highlighted
@@ -182,7 +180,7 @@
*/
private static String highlightField(Query query, String fieldName, String text)
throws IOException, InvalidTokenOffsetsException {
- TokenStream tokenStream = new StandardAnalyzer(TEST_VERSION).tokenStream(fieldName, new StringReader(text));
+ TokenStream tokenStream = new StandardAnalyzer(TEST_VERSION_CURRENT).tokenStream(fieldName, new StringReader(text));
// Assuming "", "" used to highlight
SimpleHTMLFormatter formatter = new SimpleHTMLFormatter();
QueryScorer scorer = new QueryScorer(query, fieldName, FIELD_NAME);
@@ -228,7 +226,7 @@
String q = "(" + f1c + ph1 + " OR " + f2c + ph1 + ") AND (" + f1c + ph2
+ " OR " + f2c + ph2 + ")";
Analyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
- QueryParser qp = new QueryParser(TEST_VERSION, f1, analyzer);
+ QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, f1, analyzer);
Query query = qp.parse(q);
QueryScorer scorer = new QueryScorer(query, f1);
@@ -678,7 +676,7 @@
// Need to explicitly set the QueryParser property to use TermRangeQuery
// rather
// than RangeFilters
- QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
parser.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
query = parser.parse(queryString);
doSearching(query);
@@ -1028,7 +1026,7 @@
String srchkey = "football";
String s = "football-soccer in the euro 2004 footie competition";
- QueryParser parser = new QueryParser(TEST_VERSION, "bookid", analyzer);
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "bookid", analyzer);
Query query = parser.parse(srchkey);
TokenStream tokenStream = analyzer.tokenStream(null, new StringReader(s));
@@ -1154,13 +1152,13 @@
sb.append(stopWords.iterator().next());
}
SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
- Highlighter hg = getHighlighter(query, "data", new StandardAnalyzer(TEST_VERSION, stopWords).tokenStream(
+ Highlighter hg = getHighlighter(query, "data", new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords).tokenStream(
"data", new StringReader(sb.toString())), fm);// new Highlighter(fm,
// new
// QueryTermScorer(query));
hg.setTextFragmenter(new NullFragmenter());
hg.setMaxDocCharsToAnalyze(100);
- match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION, stopWords), "data", sb.toString());
+ match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords), "data", sb.toString());
assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
.getMaxDocCharsToAnalyze());
@@ -1171,7 +1169,7 @@
// + whitespace)
sb.append(" ");
sb.append(goodWord);
- match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION, stopWords), "data", sb.toString());
+ match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords), "data", sb.toString());
assertTrue("Matched text should be no more than 100 chars in length ", match.length() < hg
.getMaxDocCharsToAnalyze());
}
@@ -1192,11 +1190,11 @@
String text = "this is a text with searchterm in it";
SimpleHTMLFormatter fm = new SimpleHTMLFormatter();
- Highlighter hg = getHighlighter(query, "text", new StandardAnalyzer(TEST_VERSION,
+ Highlighter hg = getHighlighter(query, "text", new StandardAnalyzer(TEST_VERSION_CURRENT,
stopWords).tokenStream("text", new StringReader(text)), fm);
hg.setTextFragmenter(new NullFragmenter());
hg.setMaxDocCharsToAnalyze(36);
- String match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION, stopWords), "text", text);
+ String match = hg.getBestFragment(new StandardAnalyzer(TEST_VERSION_CURRENT, stopWords), "text", text);
assertTrue(
"Matched text should contain remainder of text after highlighted query ",
match.endsWith("in it"));
@@ -1213,9 +1211,9 @@
numHighlights = 0;
// test to show how rewritten query can still be used
searcher = new IndexSearcher(ramDir, true);
- Analyzer analyzer = new StandardAnalyzer(TEST_VERSION);
+ Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
- QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
Query query = parser.parse("JF? or Kenned*");
System.out.println("Searching with primitive query");
// forget to set this and...
@@ -1326,7 +1324,8 @@
public void testMultiSearcher() throws Exception {
// setup index 1
RAMDirectory ramDir1 = new RAMDirectory();
- IndexWriter writer1 = new IndexWriter(ramDir1, new StandardAnalyzer(TEST_VERSION), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer1 = new IndexWriter(ramDir1, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
Document d = new Document();
Field f = new Field(FIELD_NAME, "multiOne", Field.Store.YES, Field.Index.ANALYZED);
d.add(f);
@@ -1337,7 +1336,8 @@
// setup index 2
RAMDirectory ramDir2 = new RAMDirectory();
- IndexWriter writer2 = new IndexWriter(ramDir2, new StandardAnalyzer(TEST_VERSION), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer2 = new IndexWriter(ramDir2, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
d = new Document();
f = new Field(FIELD_NAME, "multiTwo", Field.Store.YES, Field.Index.ANALYZED);
d.add(f);
@@ -1350,7 +1350,7 @@
searchers[0] = new IndexSearcher(ramDir1, true);
searchers[1] = new IndexSearcher(ramDir2, true);
MultiSearcher multiSearcher = new MultiSearcher(searchers);
- QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, new StandardAnalyzer(TEST_VERSION));
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, new StandardAnalyzer(TEST_VERSION_CURRENT));
parser.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
query = parser.parse("multi*");
System.out.println("Searching for: " + query.toString(FIELD_NAME));
@@ -1384,7 +1384,7 @@
@Override
public void run() throws Exception {
String docMainText = "fred is one of the people";
- QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
Query query = parser.parse("fred category:people");
// highlighting respects fieldnames used in query
@@ -1530,64 +1530,64 @@
Highlighter highlighter;
String result;
- query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("foo");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("foo");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("Hi-Speed10 foo", result);
- query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("10");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("10");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("Hi-Speed10 foo", result);
- query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("Hi-Speed10 foo", result);
- query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("speed");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("speed");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("Hi-Speed10 foo", result);
- query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hispeed");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hispeed");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("Hi-Speed10 foo", result);
- query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi speed");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi speed");
highlighter = getHighlighter(query, "text", getTS2(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2(), s, 3, "...");
assertEquals("Hi-Speed10 foo", result);
// ///////////////// same tests, just put the bigger overlapping token
// first
- query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("foo");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("foo");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("Hi-Speed10 foo", result);
- query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("10");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("10");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("Hi-Speed10 foo", result);
- query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("Hi-Speed10 foo", result);
- query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("speed");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("speed");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("Hi-Speed10 foo", result);
- query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hispeed");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hispeed");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("Hi-Speed10 foo", result);
- query = new QueryParser(TEST_VERSION, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi speed");
+ query = new QueryParser(TEST_VERSION_CURRENT, "text", new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).parse("hi speed");
highlighter = getHighlighter(query, "text", getTS2a(), HighlighterTest.this);
result = highlighter.getBestFragments(getTS2a(), s, 3, "...");
assertEquals("Hi-Speed10 foo", result);
@@ -1613,7 +1613,7 @@
}
private void makeIndex() throws IOException {
- IndexWriter writer = new IndexWriter( dir, a, MaxFieldLength.LIMITED );
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
writer.addDocument( doc( "t_text1", "more random words for second field del" ) );
writer.addDocument( doc( "t_text1", "random words for highlighting tests del" ) );
@@ -1623,7 +1623,7 @@
}
private void deleteDocument() throws IOException {
- IndexWriter writer = new IndexWriter( dir, a, false, MaxFieldLength.LIMITED );
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
writer.deleteDocuments( new Term( "t_text1", "del" ) );
// To see negative idf, keep the following line commented out
//writer.optimize();
@@ -1632,7 +1632,7 @@
private void searchIndex() throws IOException, ParseException, InvalidTokenOffsetsException {
String q = "t_text1:random";
- QueryParser parser = new QueryParser(TEST_VERSION, "t_text1", a );
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, "t_text1", a );
Query query = parser.parse( q );
IndexSearcher searcher = new IndexSearcher( dir, true );
// This scorer can return negative idf -> null fragment
@@ -1686,7 +1686,7 @@
}
public void doSearching(String queryString) throws Exception {
- QueryParser parser = new QueryParser(TEST_VERSION, FIELD_NAME, analyzer);
+ QueryParser parser = new QueryParser(TEST_VERSION_CURRENT, FIELD_NAME, analyzer);
parser.setEnablePositionIncrements(true);
parser.setMultiTermRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
query = parser.parse(queryString);
@@ -1725,7 +1725,8 @@
protected void setUp() throws Exception {
super.setUp();
ramDir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(ramDir, new StandardAnalyzer(TEST_VERSION), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
for (int i = 0; i < texts.length; i++) {
addDoc(writer, texts[i]);
}
Index: contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java
===================================================================
--- contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java (revision 921633)
+++ contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestEmptyIndex.java (working copy)
@@ -16,9 +16,13 @@
package org.apache.lucene.store.instantiated;
-import junit.framework.TestCase;
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.search.IndexSearcher;
@@ -26,12 +30,10 @@
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.LuceneTestCase;
-import java.util.Arrays;
-import java.io.IOException;
+public class TestEmptyIndex extends LuceneTestCase {
-public class TestEmptyIndex extends TestCase {
-
public void testSearch() throws Exception {
InstantiatedIndex ii = new InstantiatedIndex();
@@ -60,7 +62,7 @@
// make sure a Directory acts the same
Directory d = new RAMDirectory();
- new IndexWriter(d, null, true, IndexWriter.MaxFieldLength.UNLIMITED).close();
+ new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))).close();
r = IndexReader.open(d, false);
testNorms(r);
r.close();
@@ -93,7 +95,7 @@
// make sure a Directory acts the same
Directory d = new RAMDirectory();
- new IndexWriter(d, null, true, IndexWriter.MaxFieldLength.UNLIMITED).close();
+ new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))).close();
r = IndexReader.open(d, false);
termEnumTest(r);
r.close();
Index: contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java
===================================================================
--- contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java (revision 921633)
+++ contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestIndicesEquals.java (working copy)
@@ -30,6 +30,7 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Payload;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
@@ -60,7 +61,9 @@
RAMDirectory dir = new RAMDirectory();
// create dir data
- IndexWriter indexWriter = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(
+ TEST_VERSION_CURRENT)));
for (int i = 0; i < 20; i++) {
Document document = new Document();
assembleDocument(document, i);
@@ -84,7 +87,9 @@
InstantiatedIndex ii = new InstantiatedIndex();
// create dir data
- IndexWriter indexWriter = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(
+ TEST_VERSION_CURRENT)));
for (int i = 0; i < 500; i++) {
Document document = new Document();
assembleDocument(document, i);
Index: contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestSerialization.java
===================================================================
--- contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestSerialization.java (revision 921633)
+++ contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestSerialization.java (working copy)
@@ -22,6 +22,7 @@
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -35,7 +36,7 @@
Directory dir = new RAMDirectory();
- IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("foo", "bar rab abr bra rba", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
doc.add(new Field("moo", "bar rab abr bra rba", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
Index: contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java
===================================================================
--- contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java (revision 921633)
+++ contrib/instantiated/src/test/org/apache/lucene/store/instantiated/TestUnoptimizedReaderOnConstructor.java (working copy)
@@ -18,6 +18,8 @@
import java.io.IOException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
@@ -32,17 +34,17 @@
public void test() throws Exception {
Directory dir = new RAMDirectory();
- IndexWriter iw = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
addDocument(iw, "Hello, world!");
addDocument(iw, "All work and no play makes jack a dull boy");
iw.close();
- iw = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
+ iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
addDocument(iw, "Hello, tellus!");
addDocument(iw, "All work and no play makes danny a dull boy");
iw.close();
- iw = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
+ iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
addDocument(iw, "Hello, earth!");
addDocument(iw, "All work and no play makes wendy a dull girl");
iw.close();
Index: contrib/lucli/src/java/lucli/LuceneMethods.java
===================================================================
--- contrib/lucli/src/java/lucli/LuceneMethods.java (revision 921633)
+++ contrib/lucli/src/java/lucli/LuceneMethods.java (working copy)
@@ -42,9 +42,11 @@
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.index.IndexReader.FieldOption;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.search.Collector;
@@ -169,7 +171,9 @@
public void optimize() throws IOException {
//open the index writer in APPEND mode (don't create a new index)
- IndexWriter indexWriter = new IndexWriter(indexName, createAnalyzer(), false, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter indexWriter = new IndexWriter(indexName, new IndexWriterConfig(
+ Version.LUCENE_CURRENT, createAnalyzer()).setOpenMode(
+ OpenMode.APPEND));
message("Starting to optimize index.");
long start = System.currentTimeMillis();
indexWriter.optimize();
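Throughout this patch the old boolean create argument becomes an OpenMode on
the config, as in the lucli hunk above. A sketch of the mapping (assuming the
3.1 API, where CREATE_OR_APPEND is the default when no mode is set):

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.IndexWriterConfig.OpenMode;
    import org.apache.lucene.util.Version;

    final class OpenModeSketch {
      // create == true  -> OpenMode.CREATE (overwrite any existing index)
      // create == false -> OpenMode.APPEND (fail if no index exists yet)
      static IndexWriterConfig toConfig(boolean create, Analyzer analyzer) {
        return new IndexWriterConfig(Version.LUCENE_31, analyzer)
            .setOpenMode(create ? OpenMode.CREATE : OpenMode.APPEND);
      }
    }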
Index: contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java
===================================================================
--- contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java (revision 921633)
+++ contrib/memory/src/test/org/apache/lucene/index/memory/MemoryIndexTest.java (working copy)
@@ -42,6 +42,7 @@
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Collector;
@@ -410,7 +411,7 @@
RAMDirectory dir = new RAMDirectory();
IndexWriter writer = null;
try {
- writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
writer.addDocument(doc);
writer.optimize();
return dir;
Index: contrib/misc/src/java/org/apache/lucene/index/MultiPassIndexSplitter.java
===================================================================
--- contrib/misc/src/java/org/apache/lucene/index/MultiPassIndexSplitter.java (revision 921633)
+++ contrib/misc/src/java/org/apache/lucene/index/MultiPassIndexSplitter.java (working copy)
@@ -22,10 +22,11 @@
import java.util.ArrayList;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.OpenBitSet;
+import org.apache.lucene.util.Version;
/**
* This tool splits the input index into multiple equal parts. The method employed
@@ -88,8 +89,10 @@
}
}
}
- IndexWriter w = new IndexWriter(outputs[i], new WhitespaceAnalyzer(),
- true, MaxFieldLength.UNLIMITED);
+ IndexWriter w = new IndexWriter(outputs[i], new IndexWriterConfig(
+ Version.LUCENE_CURRENT,
+ new WhitespaceAnalyzer(Version.LUCENE_CURRENT))
+ .setOpenMode(OpenMode.CREATE));
System.err.println("Writing part " + (i + 1) + " ...");
w.addIndexes(new IndexReader[]{input});
w.close();
Index: contrib/misc/src/java/org/apache/lucene/misc/IndexMergeTool.java
===================================================================
--- contrib/misc/src/java/org/apache/lucene/misc/IndexMergeTool.java (revision 921633)
+++ contrib/misc/src/java/org/apache/lucene/misc/IndexMergeTool.java (working copy)
@@ -16,10 +16,13 @@
* limitations under the License.
*/
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.analysis.SimpleAnalyzer;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.Version;
import java.io.File;
import java.io.IOException;
@@ -36,7 +39,9 @@
}
FSDirectory mergedIndex = FSDirectory.open(new File(args[0]));
- IndexWriter writer = new IndexWriter(mergedIndex, new SimpleAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(mergedIndex, new IndexWriterConfig(
+ Version.LUCENE_CURRENT, new WhitespaceAnalyzer(Version.LUCENE_CURRENT))
+ .setOpenMode(OpenMode.CREATE));
Directory[] indexes = new Directory[args.length - 1];
for (int i = 1; i < args.length; i++) {
Index: contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java
===================================================================
--- contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java (revision 921633)
+++ contrib/misc/src/test/org/apache/lucene/index/TestFieldNormModifier.java (working copy)
@@ -23,7 +23,6 @@
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.IndexSearcher;
@@ -58,7 +57,9 @@
@Override
protected void setUp() throws Exception {
super.setUp();
- IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(store, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(
+ TEST_VERSION_CURRENT)));
for (int i = 0; i < NUM_DOCS; i++) {
Document d = new Document();
Index: contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java
===================================================================
--- contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java (revision 921633)
+++ contrib/misc/src/test/org/apache/lucene/index/TestIndexSplitter.java (working copy)
@@ -20,7 +20,7 @@
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
@@ -35,7 +35,7 @@
_TestUtil.rmDir(destDir);
destDir.mkdirs();
FSDirectory fsDir = FSDirectory.open(dir);
- IndexWriter iw = new IndexWriter(fsDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, MaxFieldLength.UNLIMITED);
+ IndexWriter iw = new IndexWriter(fsDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE));
for (int x=0; x < 100; x++) {
Document doc = TestIndexWriterReader.createDocument(x, "index", 5);
iw.addDocument(doc);
Index: contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java
===================================================================
--- contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java (revision 921633)
+++ contrib/misc/src/test/org/apache/lucene/index/TestMultiPassIndexSplitter.java (working copy)
@@ -19,7 +19,6 @@
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@@ -32,8 +31,7 @@
protected void setUp() throws Exception {
super.setUp();
RAMDirectory dir = new RAMDirectory();
- IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
- MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc;
for (int i = 0; i < NUM_DOCS; i++) {
doc = new Document();
Index: contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java
===================================================================
--- contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java (revision 921633)
+++ contrib/misc/src/test/org/apache/lucene/index/TestTermVectorAccessor.java (working copy)
@@ -28,7 +28,7 @@
public void test() throws Exception {
Directory dir = new RAMDirectory();
- IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT, Collections.emptySet()), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT, Collections.emptySet())));
Document doc;
Index: contrib/misc/src/test/org/apache/lucene/misc/ChainedFilterTest.java
===================================================================
--- contrib/misc/src/test/org/apache/lucene/misc/ChainedFilterTest.java (revision 921633)
+++ contrib/misc/src/test/org/apache/lucene/misc/ChainedFilterTest.java (working copy)
@@ -20,13 +20,12 @@
import java.util.Calendar;
import java.util.GregorianCalendar;
-import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.CachingWrapperFilter;
@@ -58,8 +57,8 @@
protected void setUp() throws Exception {
super.setUp();
directory = new RAMDirectory();
- IndexWriter writer =
- new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Calendar cal = new GregorianCalendar();
cal.clear();
@@ -187,9 +186,7 @@
public void testWithCachingFilter() throws Exception {
Directory dir = new RAMDirectory();
- Analyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
-
- IndexWriter writer = new IndexWriter(dir, analyzer, true, MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer.close();
Searcher searcher = new IndexSearcher(dir, true);
Index: contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java
===================================================================
--- contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java (revision 921633)
+++ contrib/misc/src/test/org/apache/lucene/misc/TestLengthNormModifier.java (working copy)
@@ -25,8 +25,8 @@
import org.apache.lucene.index.FieldNormModifier;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.IndexSearcher;
@@ -61,7 +61,8 @@
@Override
protected void setUp() throws Exception {
super.setUp();
- IndexWriter writer = new IndexWriter(store, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(store, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
for (int i = 0; i < NUM_DOCS; i++) {
Document d = new Document();
Index: contrib/misc/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java
===================================================================
--- contrib/misc/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java (revision 921633)
+++ contrib/misc/src/test/org/apache/lucene/queryParser/complexPhrase/TestComplexPhraseQuery.java (working copy)
@@ -20,11 +20,12 @@
import java.util.HashSet;
import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
@@ -113,7 +114,7 @@
protected void setUp() throws Exception {
super.setUp();
RAMDirectory rd = new RAMDirectory();
- IndexWriter w = new IndexWriter(rd, analyzer, MaxFieldLength.UNLIMITED);
+ IndexWriter w = new IndexWriter(rd, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
for (int i = 0; i < docsContent.length; i++) {
Document doc = new Document();
doc.add(new Field("name", docsContent[i].name, Field.Store.YES,
Index: contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java
===================================================================
--- contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java (revision 921633)
+++ contrib/queries/src/test/org/apache/lucene/search/BooleanFilterTest.java (working copy)
@@ -24,6 +24,7 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@@ -36,7 +37,8 @@
protected void setUp() throws Exception {
super.setUp();
directory = new RAMDirectory();
- IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
//Add series of docs with filterable fields : acces rights, prices, dates and "in-stock" flags
addDoc(writer, "admin guest", "010", "20040101","Y");
Index: contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java
===================================================================
--- contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java (revision 921633)
+++ contrib/queries/src/test/org/apache/lucene/search/DuplicateFilterTest.java (working copy)
@@ -25,6 +25,7 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.store.RAMDirectory;
@@ -41,7 +42,8 @@
protected void setUp() throws Exception {
super.setUp();
directory = new RAMDirectory();
- IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
//Add series of docs with filterable fields : url, text and dates flags
addDoc(writer, "http://lucene.apache.org", "lucene 1.4.3 available", "20040101");
Index: contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java
===================================================================
--- contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java (revision 921633)
+++ contrib/queries/src/test/org/apache/lucene/search/FuzzyLikeThisQueryTest.java (working copy)
@@ -25,8 +25,8 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@@ -39,7 +39,7 @@
protected void setUp() throws Exception {
super.setUp();
directory = new RAMDirectory();
- IndexWriter writer = new IndexWriter(directory, analyzer,true, MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
//Add series of docs with misspelt names
addDoc(writer, "jonathon smythe","1");
Index: contrib/queries/src/test/org/apache/lucene/search/TermsFilterTest.java
===================================================================
--- contrib/queries/src/test/org/apache/lucene/search/TermsFilterTest.java (revision 921633)
+++ contrib/queries/src/test/org/apache/lucene/search/TermsFilterTest.java (working copy)
@@ -24,8 +24,8 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.OpenBitSet;
@@ -48,15 +48,14 @@
assertTrue("Must be cached",cachedFilters.contains(b));
b.addTerm(new Term("field1","c"));
assertFalse("Must not be cached",cachedFilters.contains(b));
-
}
- public void testMissingTerms() throws Exception
- {
+
+ public void testMissingTerms() throws Exception {
String fieldName="field1";
RAMDirectory rd=new RAMDirectory();
- IndexWriter w=new IndexWriter(rd,new WhitespaceAnalyzer(TEST_VERSION_CURRENT),MaxFieldLength.UNLIMITED);
- for (int i = 0; i < 100; i++)
- {
+ IndexWriter w = new IndexWriter(rd, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+ for (int i = 0; i < 100; i++) {
Document doc=new Document();
int term=i*10; //terms are units of 10;
doc.add(new Field(fieldName,""+term,Field.Store.YES,Field.Index.NOT_ANALYZED));
Index: contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java
===================================================================
--- contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java (revision 921633)
+++ contrib/queries/src/test/org/apache/lucene/search/similar/TestMoreLikeThis.java (working copy)
@@ -28,7 +28,7 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
@@ -45,8 +45,7 @@
protected void setUp() throws Exception {
super.setUp();
directory = new RAMDirectory();
- IndexWriter writer = new IndexWriter(directory, new StandardAnalyzer(TEST_VERSION_CURRENT),
- true, MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
// Add series of docs with specific information for MoreLikeThis
addDoc(writer, "lucene");
Index: contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java
===================================================================
--- contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java (revision 921633)
+++ contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQPHelper.java (working copy)
@@ -27,6 +27,7 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.core.QueryNodeException;
import org.apache.lucene.queryParser.standard.config.DefaultOperatorAttribute.Operator;
import org.apache.lucene.search.BooleanClause;
@@ -319,8 +320,7 @@
public void testStopWordSearching() throws Exception {
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
Directory ramDir = new RAMDirectory();
- IndexWriter iw = new IndexWriter(ramDir, analyzer, true,
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
Document doc = new Document();
doc.add(new Field("body", "blah the footest blah", Field.Store.NO,
Field.Index.ANALYZED));
Index: contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQueryParserWrapper.java
===================================================================
--- contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQueryParserWrapper.java (revision 921633)
+++ contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestMultiFieldQueryParserWrapper.java (working copy)
@@ -40,7 +40,11 @@
import org.apache.lucene.util.LuceneTestCase;
/**
- * Tests multi field query parsing using the {@link MultiFieldQueryParserWrapper}.
+ * Tests multi field query parsing using the
+ * {@link MultiFieldQueryParserWrapper}.
+ *
+ * @deprecated this test exercises the deprecated MultiFieldQueryParserWrapper;
+ * when the latter is removed, this test should be removed as well.
*/
public class TestMultiFieldQueryParserWrapper extends LuceneTestCase {
Index: contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java
===================================================================
--- contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java (revision 921633)
+++ contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQPHelper.java (working copy)
@@ -51,6 +51,7 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.messages.MessageImpl;
import org.apache.lucene.queryParser.core.QueryNodeException;
@@ -571,8 +572,7 @@
public void testFarsiRangeCollating() throws Exception {
RAMDirectory ramDir = new RAMDirectory();
- IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("content", "\u0633\u0627\u0628", Field.Store.YES,
Field.Index.NOT_ANALYZED));
@@ -994,8 +994,7 @@
public void testLocalDateFormat() throws IOException, QueryNodeException {
RAMDirectory ramDir = new RAMDirectory();
- IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
iw.close();
@@ -1193,7 +1192,7 @@
public void testMultiPhraseQuery() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new CannedAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new CannedAnalyzer()));
Document doc = new Document();
doc.add(new Field("field", "", Field.Store.NO, Field.Index.ANALYZED));
w.addDocument(doc);
Index: contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQueryParserWrapper.java
===================================================================
--- contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQueryParserWrapper.java (revision 921633)
+++ contrib/queryparser/src/test/org/apache/lucene/queryParser/standard/TestQueryParserWrapper.java (working copy)
@@ -78,6 +78,9 @@
* to use new {@link QueryParserWrapper} instead of the old query parser.
*
* Tests QueryParser.
+ *
+ * @deprecated this entire test case tests the deprecated QueryParserWrapper;
+ * when QueryParserWrapper is removed, this test will be removed as well.
*/
public class TestQueryParserWrapper extends LocalizedTestCase {
Index: contrib/regex/src/test/org/apache/lucene/search/regex/TestRegexQuery.java
===================================================================
--- contrib/regex/src/test/org/apache/lucene/search/regex/TestRegexQuery.java (revision 921633)
+++ contrib/regex/src/test/org/apache/lucene/search/regex/TestRegexQuery.java (working copy)
@@ -19,6 +19,7 @@
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
@@ -40,8 +41,8 @@
super.setUp();
RAMDirectory directory = new RAMDirectory();
try {
- IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field(FN, "the quick brown fox jumps over the lazy dog", Field.Store.NO, Field.Index.ANALYZED));
writer.addDocument(doc);
Index: contrib/regex/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java
===================================================================
--- contrib/regex/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java (revision 921633)
+++ contrib/regex/src/test/org/apache/lucene/search/regex/TestSpanRegexQuery.java (working copy)
@@ -25,7 +25,9 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MultiSearcher;
import org.apache.lucene.search.spans.SpanFirstQuery;
@@ -44,7 +46,8 @@
public void testSpanRegex() throws Exception {
RAMDirectory directory = new RAMDirectory();
- IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
// doc.add(new Field("field", "the quick brown fox jumps over the lazy dog",
// Field.Store.NO, Field.Index.ANALYZED));
@@ -109,15 +112,15 @@
Field.Index.ANALYZED_NO_NORMS));
// creating first index writer
- IndexWriter writerA = new IndexWriter(indexStoreA, new StandardAnalyzer(TEST_VERSION_CURRENT),
- true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writerA = new IndexWriter(indexStoreA, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE));
writerA.addDocument(lDoc);
writerA.optimize();
writerA.close();
// creating second index writer
- IndexWriter writerB = new IndexWriter(indexStoreB, new StandardAnalyzer(TEST_VERSION_CURRENT),
- true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writerB = new IndexWriter(indexStoreB, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE));
writerB.addDocument(lDoc2);
writerB.optimize();
writerB.close();
Index: contrib/remote/src/test/org/apache/lucene/search/TestRemoteCachingWrapperFilter.java
===================================================================
--- contrib/remote/src/test/org/apache/lucene/search/TestRemoteCachingWrapperFilter.java (revision 921633)
+++ contrib/remote/src/test/org/apache/lucene/search/TestRemoteCachingWrapperFilter.java (working copy)
@@ -27,6 +27,7 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.RAMDirectory;
@@ -57,8 +58,9 @@
private static void startServer() throws Exception {
// construct an index
RAMDirectory indexStore = new RAMDirectory();
- IndexWriter writer = new IndexWriter(indexStore, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(
+ TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("test", "test text", Field.Store.YES, Field.Index.ANALYZED));
doc.add(new Field("type", "A", Field.Store.YES, Field.Index.ANALYZED));
Index: contrib/remote/src/test/org/apache/lucene/search/TestRemoteSearchable.java
===================================================================
--- contrib/remote/src/test/org/apache/lucene/search/TestRemoteSearchable.java (revision 921633)
+++ contrib/remote/src/test/org/apache/lucene/search/TestRemoteSearchable.java (working copy)
@@ -22,6 +22,7 @@
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.*;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.RAMDirectory;
@@ -58,7 +59,8 @@
private static void startServer() throws Exception {
// construct an index
RAMDirectory indexStore = new RAMDirectory();
- IndexWriter writer = new IndexWriter(indexStore,new SimpleAnalyzer(TEST_VERSION_CURRENT),true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("test", "test text", Field.Store.YES, Field.Index.ANALYZED));
doc.add(new Field("other", "other test text", Field.Store.YES, Field.Index.ANALYZED));
Index: contrib/remote/src/test/org/apache/lucene/search/TestRemoteSort.java
===================================================================
--- contrib/remote/src/test/org/apache/lucene/search/TestRemoteSort.java (revision 921633)
+++ contrib/remote/src/test/org/apache/lucene/search/TestRemoteSort.java (working copy)
@@ -35,6 +35,8 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@@ -109,9 +111,10 @@
private Searcher getIndex (boolean even, boolean odd)
throws IOException {
RAMDirectory indexStore = new RAMDirectory ();
- IndexWriter writer = new IndexWriter (indexStore, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(2);
- writer.setMergeFactor(1000);
+ IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT))
+ .setMaxBufferedDocs(2));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(1000);
for (int i=0; i<data.length; ++i) {
Index: contrib/spellchecker/src/java/org/apache/lucene/search/spell/SpellChecker.java
===================================================================
--- contrib/spellchecker/src/java/org/apache/lucene/search/spell/SpellChecker.java (revision 921633)
+++ contrib/spellchecker/src/java/org/apache/lucene/search/spell/SpellChecker.java (working copy)
@@ -139,8 +143,9 @@
synchronized (modifyCurrentIndexLock) {
ensureOpen();
if (!IndexReader.indexExists(spellIndexDir)) {
- IndexWriter writer = new IndexWriter(spellIndexDir, null, true,
- IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(spellIndexDir,
+ new IndexWriterConfig(Version.LUCENE_CURRENT,
+ new WhitespaceAnalyzer(Version.LUCENE_CURRENT)));
writer.close();
}
swapSearcher(spellIndexDir);
@@ -353,7 +358,10 @@
synchronized (modifyCurrentIndexLock) {
ensureOpen();
final Directory dir = this.spellIndex;
- final IndexWriter writer = new IndexWriter(dir, null, true, IndexWriter.MaxFieldLength.UNLIMITED);
+ final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ Version.LUCENE_CURRENT,
+ new WhitespaceAnalyzer(Version.LUCENE_CURRENT))
+ .setOpenMode(OpenMode.CREATE));
writer.close();
swapSearcher(dir);
}
@@ -388,10 +396,8 @@
synchronized (modifyCurrentIndexLock) {
ensureOpen();
final Directory dir = this.spellIndex;
- final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(),
- IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setMergeFactor(mergeFactor);
- writer.setRAMBufferSizeMB(ramMB);
+ final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_CURRENT, new WhitespaceAnalyzer(Version.LUCENE_CURRENT)).setRAMBufferSizeMB(ramMB));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(mergeFactor);
Iterator iter = dict.getWordsIterator();
while (iter.hasNext()) {
Index: contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java
===================================================================
--- contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java (revision 921633)
+++ contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestLuceneDictionary.java (working copy)
@@ -25,6 +25,7 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
@@ -46,7 +47,7 @@
@Override
protected void setUp() throws Exception {
super.setUp();
- IndexWriter writer = new IndexWriter(store, new WhitespaceAnalyzer(LuceneTestCase.TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(store, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc;
Index: contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java
===================================================================
--- contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java (revision 921633)
+++ contrib/spellchecker/src/test/org/apache/lucene/search/spell/TestSpellChecker.java (working copy)
@@ -32,6 +32,7 @@
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
@@ -54,7 +55,8 @@
//create a user index
userindex = new RAMDirectory();
- IndexWriter writer = new IndexWriter(userindex, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(userindex, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
for (int i = 0; i < 1000; i++) {
Document doc = new Document();
Index: contrib/surround/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java
===================================================================
--- contrib/surround/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java (revision 921633)
+++ contrib/surround/src/test/org/apache/lucene/queryParser/surround/query/SingleFieldTestDb.java (working copy)
@@ -19,11 +19,12 @@
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.Version;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
-import org.apache.lucene.analysis.WhitespaceAnalyzer;
-import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
public class SingleFieldTestDb {
private Directory db;
@@ -35,9 +36,9 @@
db = new RAMDirectory();
docs = documents;
fieldName = fName;
- Analyzer analyzer = new WhitespaceAnalyzer();
- IndexWriter writer = new IndexWriter(db, analyzer, true,
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(db, new IndexWriterConfig(
+ Version.LUCENE_CURRENT,
+ new WhitespaceAnalyzer(Version.LUCENE_CURRENT)));
for (int j = 0; j < docs.length; j++) {
Document d = new Document();
d.add(new Field(fieldName, docs[j], Field.Store.NO, Field.Index.ANALYZED));
Index: contrib/swing/src/java/org/apache/lucene/swing/models/ListSearcher.java
===================================================================
--- contrib/swing/src/java/org/apache/lucene/swing/models/ListSearcher.java (revision 921633)
+++ contrib/swing/src/java/org/apache/lucene/swing/models/ListSearcher.java (working copy)
@@ -31,6 +31,7 @@
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
@@ -87,7 +88,7 @@
private ListDataListener listModelListener;
public ListSearcher(ListModel newModel) {
- analyzer = new WhitespaceAnalyzer();
+ analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
setListModel(newModel);
listModelListener = new ListModelHandler();
newModel.addListDataListener(listModelListener);
@@ -117,7 +118,7 @@
try {
// recreate the RAMDirectory
directory = new RAMDirectory();
- IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Version.LUCENE_CURRENT, analyzer));
// iterate through all rows
for (int row=0; row < listModel.getSize(); row++){
Index: contrib/swing/src/java/org/apache/lucene/swing/models/TableSearcher.java
===================================================================
--- contrib/swing/src/java/org/apache/lucene/swing/models/TableSearcher.java (revision 921633)
+++ contrib/swing/src/java/org/apache/lucene/swing/models/TableSearcher.java (working copy)
@@ -29,6 +29,7 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
@@ -115,7 +116,7 @@
* @param tableModel The table model to decorate
*/
public TableSearcher(TableModel tableModel) {
- analyzer = new WhitespaceAnalyzer();
+ analyzer = new WhitespaceAnalyzer(Version.LUCENE_CURRENT);
tableModelListener = new TableModelHandler();
setTableModel(tableModel);
tableModel.addTableModelListener(tableModelListener);
@@ -163,7 +164,8 @@
try {
// recreate the RAMDirectory
directory = new RAMDirectory();
- IndexWriter writer = new IndexWriter(directory, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
+ Version.LUCENE_CURRENT, analyzer));
// iterate through all rows
for (int row=0; row < tableModel.getRowCount(); row++){
Index: contrib/wordnet/src/java/org/apache/lucene/wordnet/Syns2Index.java
===================================================================
--- contrib/wordnet/src/java/org/apache/lucene/wordnet/Syns2Index.java (revision 921633)
+++ contrib/wordnet/src/java/org/apache/lucene/wordnet/Syns2Index.java (working copy)
@@ -35,6 +35,9 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LogMergePolicy;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
@@ -245,8 +248,10 @@
try {
// override the specific index if it already exists
- IndexWriter writer = new IndexWriter(dir, ana, true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setUseCompoundFile(true); // why?
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ Version.LUCENE_CURRENT, ana).setOpenMode(OpenMode.CREATE));
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(true); // why?
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(true); // why?
Iterator i1 = word2Nums.keySet().iterator();
while (i1.hasNext()) // for each word
{
Index: contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java
===================================================================
--- contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java (revision 921633)
+++ contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestParser.java (working copy)
@@ -12,12 +12,14 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.Version;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -63,7 +65,7 @@
{
BufferedReader d = new BufferedReader(new InputStreamReader(TestParser.class.getResourceAsStream("reuters21578.txt")));
dir=new RAMDirectory();
- IndexWriter writer=new IndexWriter(dir,analyzer,true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_24, analyzer));
String line = d.readLine();
while(line!=null)
{
Index: contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestQueryTemplateManager.java
===================================================================
--- contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestQueryTemplateManager.java (revision 921633)
+++ contrib/xml-query-parser/src/test/org/apache/lucene/xmlparser/TestQueryTemplateManager.java (working copy)
@@ -11,6 +11,7 @@
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.RAMDirectory;
@@ -141,7 +142,7 @@
//Create an index
RAMDirectory dir=new RAMDirectory();
- IndexWriter w=new IndexWriter(dir,analyzer,true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter w=new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
for (int i = 0; i < docFieldValues.length; i++)
{
w.addDocument(getDocumentFromString(docFieldValues[i]));
Index: src/demo/org/apache/lucene/demo/IndexFiles.java
===================================================================
--- src/demo/org/apache/lucene/demo/IndexFiles.java (revision 921633)
+++ src/demo/org/apache/lucene/demo/IndexFiles.java (working copy)
@@ -19,6 +19,8 @@
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
@@ -55,7 +57,9 @@
Date start = new Date();
try {
- IndexWriter writer = new IndexWriter(FSDirectory.open(INDEX_DIR), new StandardAnalyzer(Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(FSDirectory.open(INDEX_DIR),
+ new IndexWriterConfig(Version.LUCENE_CURRENT, new StandardAnalyzer(
+ Version.LUCENE_CURRENT)).setOpenMode(OpenMode.CREATE));
System.out.println("Indexing to directory '" +INDEX_DIR+ "'...");
indexDocs(writer, docDir);
System.out.println("Optimizing...");
Index: src/demo/org/apache/lucene/demo/IndexHTML.java
===================================================================
--- src/demo/org/apache/lucene/demo/IndexHTML.java (revision 921633)
+++ src/demo/org/apache/lucene/demo/IndexHTML.java (working copy)
@@ -21,8 +21,10 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermEnum;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
@@ -77,8 +79,10 @@
deleting = true;
indexDocs(root, index, create);
}
- writer = new IndexWriter(FSDirectory.open(index), new StandardAnalyzer(Version.LUCENE_CURRENT), create,
- new IndexWriter.MaxFieldLength(1000000));
+ writer = new IndexWriter(FSDirectory.open(index), new IndexWriterConfig(
+ Version.LUCENE_CURRENT, new StandardAnalyzer(Version.LUCENE_CURRENT))
+ .setMaxFieldLength(1000000).setOpenMode(
+ create ? OpenMode.CREATE : OpenMode.CREATE_OR_APPEND));
indexDocs(root, index, create); // add new docs
System.out.println("Optimizing index...");
Index: src/java/org/apache/lucene/index/DirectoryReader.java
===================================================================
--- src/java/org/apache/lucene/index/DirectoryReader.java (revision 921633)
+++ src/java/org/apache/lucene/index/DirectoryReader.java (working copy)
@@ -742,7 +742,7 @@
if (writeLock == null) {
Lock writeLock = directory.makeLock(IndexWriter.WRITE_LOCK_NAME);
- if (!writeLock.obtain(IndexWriter.WRITE_LOCK_TIMEOUT)) // obtain write lock
+ if (!writeLock.obtain(IndexWriterConfig.WRITE_LOCK_TIMEOUT)) // obtain write lock
throw new LockObtainFailedException("Index locked for write: " + writeLock);
this.writeLock = writeLock;
Index: src/java/org/apache/lucene/index/DocumentsWriter.java
===================================================================
--- src/java/org/apache/lucene/index/DocumentsWriter.java (revision 921633)
+++ src/java/org/apache/lucene/index/DocumentsWriter.java (working copy)
@@ -138,7 +138,7 @@
private DocFieldProcessor docFieldProcessor;
PrintStream infoStream;
- int maxFieldLength = IndexWriter.DEFAULT_MAX_FIELD_LENGTH;
+ int maxFieldLength = IndexWriterConfig.UNLIMITED_FIELD_LENGTH;
Similarity similarity;
List newFiles;
@@ -223,7 +223,7 @@
abstract DocConsumer getChain(DocumentsWriter documentsWriter);
}
- static final IndexingChain DefaultIndexingChain = new IndexingChain() {
+ static final IndexingChain defaultIndexingChain = new IndexingChain() {
@Override
DocConsumer getChain(DocumentsWriter documentsWriter) {
@@ -270,22 +270,22 @@
// The max number of delete terms that can be buffered before
// they must be flushed to disk.
- private int maxBufferedDeleteTerms = IndexWriter.DEFAULT_MAX_BUFFERED_DELETE_TERMS;
+ private int maxBufferedDeleteTerms = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS;
// How much RAM we can use before flushing. This is 0 if
// we are flushing by doc count instead.
- private long ramBufferSize = (long) (IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024);
+ private long ramBufferSize = (long) (IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024);
private long waitQueuePauseBytes = (long) (ramBufferSize*0.1);
private long waitQueueResumeBytes = (long) (ramBufferSize*0.05);
// If we've allocated 5% over our RAM budget, we then
// free down to 95%
- private long freeTrigger = (long) (IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024*1.05);
- private long freeLevel = (long) (IndexWriter.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024*0.95);
+ private long freeTrigger = (long) (IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024*1.05);
+ private long freeLevel = (long) (IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB*1024*1024*0.95);
// Flush @ this number of docs. If ramBufferSize is
// non-zero we will flush by RAM usage instead.
- private int maxBufferedDocs = IndexWriter.DEFAULT_MAX_BUFFERED_DOCS;
+ private int maxBufferedDocs = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS;
private int flushedDocCount; // How many docs already flushed to index
@@ -304,7 +304,7 @@
DocumentsWriter(Directory directory, IndexWriter writer, IndexingChain indexingChain) throws IOException {
this.directory = directory;
this.writer = writer;
- this.similarity = writer.getSimilarity();
+ this.similarity = writer.getConfig().getSimilarity();
flushedDocCount = writer.maxDoc();
consumer = indexingChain.getChain(this);
@@ -342,8 +342,8 @@
/** Set how much RAM we can use before flushing. */
synchronized void setRAMBufferSizeMB(double mb) {
- if (mb == IndexWriter.DISABLE_AUTO_FLUSH) {
- ramBufferSize = IndexWriter.DISABLE_AUTO_FLUSH;
+ if (mb == IndexWriterConfig.DISABLE_AUTO_FLUSH) {
+ ramBufferSize = IndexWriterConfig.DISABLE_AUTO_FLUSH;
waitQueuePauseBytes = 4*1024*1024;
waitQueueResumeBytes = 2*1024*1024;
} else {
@@ -356,7 +356,7 @@
}
synchronized double getRAMBufferSizeMB() {
- if (ramBufferSize == IndexWriter.DISABLE_AUTO_FLUSH) {
+ if (ramBufferSize == IndexWriterConfig.DISABLE_AUTO_FLUSH) {
return ramBufferSize;
} else {
return ramBufferSize/1024./1024.;
@@ -587,7 +587,7 @@
synchronized private void initFlushState(boolean onlyDocStore) {
initSegmentName(onlyDocStore);
- flushState = new SegmentWriteState(this, directory, segment, docStoreSegment, numDocsInRAM, numDocsInStore, writer.getTermIndexInterval());
+ flushState = new SegmentWriteState(this, directory, segment, docStoreSegment, numDocsInRAM, numDocsInStore, writer.getConfig().getTermIndexInterval());
}
/** Flush all pending docs to a new segment */
@@ -766,7 +766,7 @@
// always get N docs when we flush by doc count, even if
// > 1 thread is adding documents:
if (!flushPending &&
- maxBufferedDocs != IndexWriter.DISABLE_AUTO_FLUSH
+ maxBufferedDocs != IndexWriterConfig.DISABLE_AUTO_FLUSH
&& numDocsInRAM >= maxBufferedDocs) {
flushPending = true;
state.doFlushAfter = true;
@@ -928,9 +928,9 @@
}
synchronized boolean deletesFull() {
- return (ramBufferSize != IndexWriter.DISABLE_AUTO_FLUSH &&
+ return (ramBufferSize != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
(deletesInRAM.bytesUsed + deletesFlushed.bytesUsed + numBytesUsed) >= ramBufferSize) ||
- (maxBufferedDeleteTerms != IndexWriter.DISABLE_AUTO_FLUSH &&
+ (maxBufferedDeleteTerms != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
((deletesInRAM.size() + deletesFlushed.size()) >= maxBufferedDeleteTerms));
}
@@ -943,9 +943,9 @@
// too-frequent flushing of a long tail of tiny segments
// when merges (which always apply deletes) are
// infrequent.
- return (ramBufferSize != IndexWriter.DISABLE_AUTO_FLUSH &&
+ return (ramBufferSize != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
(deletesInRAM.bytesUsed + deletesFlushed.bytesUsed) >= ramBufferSize/2) ||
- (maxBufferedDeleteTerms != IndexWriter.DISABLE_AUTO_FLUSH &&
+ (maxBufferedDeleteTerms != IndexWriterConfig.DISABLE_AUTO_FLUSH &&
((deletesInRAM.size() + deletesFlushed.size()) >= maxBufferedDeleteTerms));
}
@@ -1115,7 +1115,7 @@
}
synchronized boolean doBalanceRAM() {
- return ramBufferSize != IndexWriter.DISABLE_AUTO_FLUSH && !bufferIsFull && (numBytesUsed+deletesInRAM.bytesUsed+deletesFlushed.bytesUsed >= ramBufferSize || numBytesAlloc >= freeTrigger);
+ return ramBufferSize != IndexWriterConfig.DISABLE_AUTO_FLUSH && !bufferIsFull && (numBytesUsed+deletesInRAM.bytesUsed+deletesFlushed.bytesUsed >= ramBufferSize || numBytesAlloc >= freeTrigger);
}
/** Does the synchronized work to finish/flush the
Index: src/java/org/apache/lucene/index/IndexWriter.java
===================================================================
--- src/java/org/apache/lucene/index/IndexWriter.java (revision 921633)
+++ src/java/org/apache/lucene/index/IndexWriter.java (working copy)
@@ -19,7 +19,7 @@
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Document;
-import org.apache.lucene.index.DocumentsWriter.IndexingChain;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
@@ -29,6 +29,7 @@
import org.apache.lucene.store.BufferedIndexInput;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.ThreadInterruptedException;
+import org.apache.lucene.util.Version;
import java.io.IOException;
import java.io.Closeable;
@@ -179,10 +180,11 @@
/**
* Default value for the write lock timeout (1,000).
* @see #setDefaultWriteLockTimeout
+ * @deprecated use {@link IndexWriterConfig#WRITE_LOCK_TIMEOUT} instead
*/
- public static long WRITE_LOCK_TIMEOUT = 1000;
+ public static long WRITE_LOCK_TIMEOUT = IndexWriterConfig.WRITE_LOCK_TIMEOUT;
- private long writeLockTimeout = WRITE_LOCK_TIMEOUT;
+ private long writeLockTimeout;
/**
* Name of the write lock in the index.
@@ -191,36 +193,43 @@
/**
* Value to denote a flush trigger is disabled
+ * @deprecated use {@link IndexWriterConfig#DISABLE_AUTO_FLUSH} instead
*/
- public final static int DISABLE_AUTO_FLUSH = -1;
+ public final static int DISABLE_AUTO_FLUSH = IndexWriterConfig.DISABLE_AUTO_FLUSH;
/**
* Disabled by default (because IndexWriter flushes by RAM usage
* by default). Change using {@link #setMaxBufferedDocs(int)}.
+ * @deprecated use {@link IndexWriterConfig#DEFAULT_MAX_BUFFERED_DOCS} instead.
*/
- public final static int DEFAULT_MAX_BUFFERED_DOCS = DISABLE_AUTO_FLUSH;
+ public final static int DEFAULT_MAX_BUFFERED_DOCS = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS;
/**
* Default value is 16 MB (which means flush when buffered
* docs consume 16 MB RAM). Change using {@link #setRAMBufferSizeMB}.
+ * @deprecated use {@link IndexWriterConfig#DEFAULT_RAM_BUFFER_SIZE_MB} instead.
*/
- public final static double DEFAULT_RAM_BUFFER_SIZE_MB = 16.0;
+ public final static double DEFAULT_RAM_BUFFER_SIZE_MB = IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB;
/**
* Disabled by default (because IndexWriter flushes by RAM usage
* by default). Change using {@link #setMaxBufferedDeleteTerms(int)}.
+ * @deprecated use {@link IndexWriterConfig#DEFAULT_MAX_BUFFERED_DELETE_TERMS} instead
*/
- public final static int DEFAULT_MAX_BUFFERED_DELETE_TERMS = DISABLE_AUTO_FLUSH;
+ public final static int DEFAULT_MAX_BUFFERED_DELETE_TERMS = IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS;
/**
* Default value is 10,000. Change using {@link #setMaxFieldLength(int)}.
+ *
+ * @deprecated see {@link IndexWriterConfig}
*/
public final static int DEFAULT_MAX_FIELD_LENGTH = 10000;
/**
* Default value is 128. Change using {@link #setTermIndexInterval(int)}.
+ * @deprecated use {@link IndexWriterConfig#DEFAULT_TERM_INDEX_INTERVAL} instead.
*/
- public final static int DEFAULT_TERM_INDEX_INTERVAL = 128;
+ public final static int DEFAULT_TERM_INDEX_INTERVAL = IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL;
/**
* Absolute hard maximum length for a term. If a term
@@ -244,10 +253,11 @@
private int messageID = -1;
volatile private boolean hitOOM;
- private Directory directory; // where this index resides
- private Analyzer analyzer; // how to analyze text
+ private final Directory directory; // where this index resides
+ private final Analyzer analyzer; // how to analyze text
- private Similarity similarity = Similarity.getDefault(); // how to normalize
+ // TODO (4.0): this should be made final once the setter is out
+ private /*final*/Similarity similarity = Similarity.getDefault(); // how to normalize
private volatile long changeCount; // increments every time a change is completed
private long lastCommitChangeCount; // last changeCount that was committed
@@ -270,7 +280,8 @@
private Lock writeLock;
- private int termIndexInterval = DEFAULT_TERM_INDEX_INTERVAL;
+ // TODO (4.0): this should be made final once the setter is out
+ private /*final*/int termIndexInterval;
private boolean closed;
private boolean closing;
@@ -280,7 +291,8 @@
private HashSet mergingSegments = new HashSet();
private MergePolicy mergePolicy = new LogByteSizeMergePolicy(this);
- private MergeScheduler mergeScheduler = new ConcurrentMergeScheduler();
+ // TODO (4.0): this should be made final once the setter is removed
+ private /*final*/MergeScheduler mergeScheduler;
private LinkedList pendingMerges = new LinkedList();
private Set runningMerges = new HashSet();
private List mergeExceptions = new ArrayList();
@@ -307,7 +319,11 @@
// deletes, doing merges, and reopening near real-time
// readers.
private volatile boolean poolReaders;
-
+
+ // The instance that was passed to the constructor. It is saved only in order
+ // to allow users to query an IndexWriter settings.
+ private final IndexWriterConfig config;
+
/**
* Expert: returns a readonly reader, covering all
* committed as well as un-committed changes to the index.
@@ -777,19 +793,29 @@
* Otherwise an IllegalArgumentException is thrown.
*
* @see #setUseCompoundFile(boolean)
+ * @deprecated use {@link LogMergePolicy#getUseCompoundDocStore()} and
+ * {@link LogMergePolicy#getUseCompoundFile()} directly.
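+ * <p>
+ * For instance (a sketch, assuming the writer's merge policy is a
+ * {@link LogMergePolicy}):
+ * <pre>
+ * boolean cfs = ((LogMergePolicy) writer.getMergePolicy()).getUseCompoundFile();
+ * </pre>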
*/
public boolean getUseCompoundFile() {
return getLogMergePolicy().getUseCompoundFile();
}
- /** Setting to turn on usage of a compound file. When on,
- * multiple files for each segment are merged into a
- * single file when a new segment is flushed.
- *
- * Note that this method is a convenience method: it
- * just calls mergePolicy.setUseCompoundFile as long as
- * mergePolicy is an instance of {@link LogMergePolicy}.
- * Otherwise an IllegalArgumentException is thrown.
+ /**
+ * <p>
+ * Setting to turn on usage of a compound file. When on, multiple files for
+ * each segment are merged into a single file when a new segment is flushed.
+ * </p>
+ * <p>
+ * Note that this method is a convenience method: it just calls
+ * mergePolicy.setUseCompoundFile as long as mergePolicy is an instance of
+ * {@link LogMergePolicy}. Otherwise an IllegalArgumentException is thrown.
+ * </p>
+ *
+ * @deprecated use {@link LogMergePolicy#setUseCompoundDocStore(boolean)} and
+ * {@link LogMergePolicy#setUseCompoundFile(boolean)} directly.
+ * Note that this method sets the given value on both settings, so
+ * you should consider doing the same when calling them directly.
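+ * <p>
+ * A rough equivalent of this call, under the assumption that the writer's
+ * merge policy is a {@link LogMergePolicy}:
+ * <pre>
+ * LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
+ * lmp.setUseCompoundFile(value);
+ * lmp.setUseCompoundDocStore(value);
+ * </pre>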
*/
public void setUseCompoundFile(boolean value) {
getLogMergePolicy().setUseCompoundFile(value);
@@ -799,20 +825,25 @@
/** Expert: Set the Similarity implementation used by this IndexWriter.
*
* @see Similarity#setDefault(Similarity)
+ * @deprecated use {@link IndexWriterConfig#setSimilarity(Similarity)} instead
*/
public void setSimilarity(Similarity similarity) {
ensureOpen();
this.similarity = similarity;
docWriter.setSimilarity(similarity);
+ // Required so config.getSimilarity returns the right value. But this will
+ // go away together with the method in 4.0.
+ config.setSimilarity(similarity);
}
/** Expert: Return the Similarity implementation used by this IndexWriter.
*
* This defaults to the current value of {@link Similarity#getDefault()}.
+ * @deprecated use {@link IndexWriterConfig#getSimilarity()} instead
*/
public Similarity getSimilarity() {
ensureOpen();
- return this.similarity;
+ return similarity;
}
/** Expert: Set the interval between indexed terms. Large values cause less
@@ -835,15 +866,20 @@
* must be scanned for each random term access.
*
* @see #DEFAULT_TERM_INDEX_INTERVAL
+ * @deprecated use {@link IndexWriterConfig#setTermIndexInterval(int)}
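+ * <p>
+ * Going forward, this is set on the config instead, e.g. (the value shown is
+ * just the documented default):
+ * <pre>
+ * conf.setTermIndexInterval(128);
+ * </pre>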
*/
public void setTermIndexInterval(int interval) {
ensureOpen();
this.termIndexInterval = interval;
+ // Required so config.getTermIndexInterval returns the right value. But this will
+ // go away together with the method in 4.0.
+ config.setTermIndexInterval(interval);
}
/** Expert: Return the interval between indexed terms.
*
* @see #setTermIndexInterval(int)
+ * @deprecated use {@link IndexWriterConfig#getTermIndexInterval()}
*/
public int getTermIndexInterval() {
// We pass false because this method is called by SegmentMerger while we are in the process of closing
@@ -872,10 +908,13 @@
* if it does not exist and create is
* false or if there is any other low-level
* IO error
+ * @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
*/
public IndexWriter(Directory d, Analyzer a, boolean create, MaxFieldLength mfl)
throws CorruptIndexException, LockObtainFailedException, IOException {
- init(d, a, create, null, mfl.getLimit(), null, null);
+ this(d, new IndexWriterConfig(Version.LUCENE_31, a).setOpenMode(
+ create ? OpenMode.CREATE : OpenMode.APPEND).setMaxFieldLength(
+ mfl.getLimit()));
}
/**
@@ -895,10 +934,12 @@
* @throws IOException if the directory cannot be
* read/written to or if there is any other low-level
* IO error
+ * @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
*/
public IndexWriter(Directory d, Analyzer a, MaxFieldLength mfl)
throws CorruptIndexException, LockObtainFailedException, IOException {
- init(d, a, null, mfl.getLimit(), null, null);
+ this(d, new IndexWriterConfig(Version.LUCENE_31, a)
+ .setMaxFieldLength(mfl.getLimit()));
}
/**
@@ -918,10 +959,12 @@
* @throws IOException if the directory cannot be
* read/written to or if there is any other low-level
* IO error
+ * @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
*/
public IndexWriter(Directory d, Analyzer a, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl)
throws CorruptIndexException, LockObtainFailedException, IOException {
- init(d, a, deletionPolicy, mfl.getLimit(), null, null);
+ this(d, new IndexWriterConfig(Version.LUCENE_31, a).setMaxFieldLength(
+ mfl.getLimit()).setIndexDeletionPolicy(deletionPolicy));
}
/**
@@ -947,46 +990,16 @@
* if it does not exist and create is
* false or if there is any other low-level
* IO error
+ * @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
*/
public IndexWriter(Directory d, Analyzer a, boolean create, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl)
throws CorruptIndexException, LockObtainFailedException, IOException {
- init(d, a, create, deletionPolicy, mfl.getLimit(), null, null);
+ this(d, new IndexWriterConfig(Version.LUCENE_31, a).setOpenMode(
+ create ? OpenMode.CREATE : OpenMode.APPEND).setMaxFieldLength(
+ mfl.getLimit()).setIndexDeletionPolicy(deletionPolicy));
}
/**
- * Expert: constructs an IndexWriter with a custom {@link
- * IndexDeletionPolicy} and {@link IndexingChain},
- * for the index in d.
- * Text will be analyzed with a. If
- * create is true, then a new, empty index
- * will be created in d, replacing the index
- * already there, if any.
- *
- * @param d the index directory
- * @param a the analyzer to use
- * @param create true to create the index or overwrite
- * the existing one; false to append to the existing
- * index
- * @param deletionPolicy see above
- * @param mfl whether or not to limit field lengths, value is in number of terms/tokens. See {@link org.apache.lucene.index.IndexWriter.MaxFieldLength}.
- * @param indexingChain the {@link DocConsumer} chain to be used to
- * process documents
- * @param commit which commit to open
- * @throws CorruptIndexException if the index is corrupt
- * @throws LockObtainFailedException if another writer
- * has this index open (write.lock could not
- * be obtained)
- * @throws IOException if the directory cannot be read/written to, or
- * if it does not exist and create is
- * false or if there is any other low-level
- * IO error
- */
- IndexWriter(Directory d, Analyzer a, boolean create, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl, IndexingChain indexingChain, IndexCommit commit)
- throws CorruptIndexException, LockObtainFailedException, IOException {
- init(d, a, create, deletionPolicy, mfl.getLimit(), indexingChain, commit);
- }
-
- /**
* Expert: constructs an IndexWriter on specific commit
* point, with a custom {@link IndexDeletionPolicy}, for
* the index in d. Text will be analyzed
@@ -1017,44 +1030,75 @@
* if it does not exist and create is
* false or if there is any other low-level
* IO error
+ * @deprecated use {@link #IndexWriter(Directory, IndexWriterConfig)} instead
*/
public IndexWriter(Directory d, Analyzer a, IndexDeletionPolicy deletionPolicy, MaxFieldLength mfl, IndexCommit commit)
throws CorruptIndexException, LockObtainFailedException, IOException {
- init(d, a, false, deletionPolicy, mfl.getLimit(), null, commit);
+ this(d, new IndexWriterConfig(Version.LUCENE_31, a)
+ .setOpenMode(OpenMode.APPEND).setMaxFieldLength(mfl.getLimit())
+ .setIndexDeletionPolicy(deletionPolicy).setIndexCommit(commit));
}
- private void init(Directory d, Analyzer a, IndexDeletionPolicy deletionPolicy,
- int maxFieldLength, IndexingChain indexingChain, IndexCommit commit)
- throws CorruptIndexException, LockObtainFailedException, IOException {
- if (IndexReader.indexExists(d)) {
- init(d, a, false, deletionPolicy, maxFieldLength, indexingChain, commit);
+ /**
+ * Constructs a new IndexWriter per the settings given in conf.
+ * Note that the passed in {@link IndexWriterConfig} is cloned, so making
+ * changes to it after IndexWriter has been instantiated will not affect the
+ * IndexWriter. Additionally, calling {@link #getConfig()} and changing the
+ * parameters does not affect that IndexWriter instance.
+ *
+ * NOTE: by default, {@link IndexWriterConfig#getMaxFieldLength()}
+ * returns {@link IndexWriterConfig#UNLIMITED_FIELD_LENGTH}. Pay attention to
+ * whether this setting fits your application.
+ *
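+ * <p>
+ * A typical invocation looks roughly like this (the version, analyzer and
+ * open mode below are illustrative only):
+ * <pre>
+ * IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_31,
+ *     new StandardAnalyzer(Version.LUCENE_31)).setOpenMode(OpenMode.CREATE);
+ * IndexWriter writer = new IndexWriter(d, conf);
+ * </pre>
+ *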
+ * @param d
+ * the index directory. The index is either created or appended
+ * to, according to conf.getOpenMode().
+ * @param conf
+ * the configuration settings according to which IndexWriter should
+ * be initialized.
+ * @throws CorruptIndexException
+ * if the index is corrupt
+ * @throws LockObtainFailedException
+ * if another writer has this index open (write.lock
+ * could not be obtained)
+ * @throws IOException
+ * if the directory cannot be read/written to, or if it does not
+ * exist and conf.getOpenMode() is
+ * OpenMode.APPEND or if there is any other low-level
+ * IO error
+ */
+ public IndexWriter(Directory d, IndexWriterConfig conf)
+ throws CorruptIndexException, LockObtainFailedException, IOException {
+ config = (IndexWriterConfig) conf.clone();
+ directory = d;
+ analyzer = conf.getAnalyzer();
+ setMessageID(defaultInfoStream);
+ maxFieldLength = conf.getMaxFieldLength();
+ termIndexInterval = conf.getTermIndexInterval();
+ writeLockTimeout = conf.getWriteLockTimeout();
+ similarity = conf.getSimilarity();
+ mergeScheduler = conf.getMergeScheduler();
+ mergedSegmentWarmer = conf.getMergedSegmentWarmer();
+
+ OpenMode mode = conf.getOpenMode();
+ boolean create;
+ if (mode == OpenMode.CREATE) {
+ create = true;
+ } else if (mode == OpenMode.APPEND) {
+ create = false;
} else {
- init(d, a, true, deletionPolicy, maxFieldLength, indexingChain, commit);
+ // CREATE_OR_APPEND - create only if an index does not exist
+ create = !IndexReader.indexExists(directory);
}
- }
- private void init(Directory d, Analyzer a, final boolean create,
- IndexDeletionPolicy deletionPolicy, int maxFieldLength,
- IndexingChain indexingChain, IndexCommit commit)
- throws CorruptIndexException, LockObtainFailedException, IOException {
-
- directory = d;
- analyzer = a;
- setMessageID(defaultInfoStream);
- this.maxFieldLength = maxFieldLength;
-
- if (indexingChain == null)
- indexingChain = DocumentsWriter.DefaultIndexingChain;
-
if (create) {
// Clear the write lock in case it's leftover:
directory.clearLock(WRITE_LOCK_NAME);
}
- Lock writeLock = directory.makeLock(WRITE_LOCK_NAME);
+ writeLock = directory.makeLock(WRITE_LOCK_NAME);
if (!writeLock.obtain(writeLockTimeout)) // obtain write lock
throw new LockObtainFailedException("Index locked for write: " + writeLock);
- this.writeLock = writeLock; // save it
try {
if (create) {
@@ -1085,6 +1129,7 @@
} else {
segmentInfos.read(directory);
+ IndexCommit commit = conf.getIndexCommit();
if (commit != null) {
// Swap out all segments, but, keep metadata in
// SegmentInfos, like version & generation, to
@@ -1108,14 +1153,14 @@
setRollbackSegmentInfos(segmentInfos);
- docWriter = new DocumentsWriter(directory, this, indexingChain);
+ docWriter = new DocumentsWriter(directory, this, conf.getIndexingChain());
docWriter.setInfoStream(infoStream);
docWriter.setMaxFieldLength(maxFieldLength);
// Default deleter (for backwards compatibility) is
// KeepOnlyLastCommitDeleter:
deleter = new IndexFileDeleter(directory,
- deletionPolicy == null ? new KeepOnlyLastCommitDeletionPolicy() : deletionPolicy,
+ conf.getIndexDeletionPolicy(),
segmentInfos, infoStream, docWriter);
if (deleter.startingCommitDeleted)
@@ -1125,20 +1170,22 @@
// segments_N file.
changeCount++;
+ docWriter.setMaxBufferedDeleteTerms(conf.getMaxBufferedDeleteTerms());
+ docWriter.setRAMBufferSizeMB(conf.getRAMBufferSizeMB());
+ docWriter.setMaxBufferedDocs(conf.getMaxBufferedDocs());
pushMaxBufferedDocs();
if (infoStream != null) {
- message("init: create=" + create);
messageState();
}
} catch (IOException e) {
- this.writeLock.release();
- this.writeLock = null;
+ writeLock.release();
+ writeLock = null;
throw e;
}
}
-
+
private synchronized void setRollbackSegmentInfos(SegmentInfos infos) {
rollbackSegmentInfos = (SegmentInfos) infos.clone();
assert !rollbackSegmentInfos.hasExternalSegments(directory);
@@ -1149,6 +1196,19 @@
}
/**
+ * Returns the {@link IndexWriterConfig} that was passed to
+ * {@link #IndexWriter(Directory, IndexWriterConfig)}. This allows querying
+ * IndexWriter's settings.
+ *
+ * NOTE: setting any parameter on the returned instance has no effect
+ * on the IndexWriter instance. If you need to change those settings after
+ * IndexWriter has been created, you need to instantiate a new IndexWriter.
+ */
+ public IndexWriterConfig getConfig() {
+ return config;
+ }
+
+ /**
* Expert: set the merge policy used by this writer.
*/
public void setMergePolicy(MergePolicy mp) {
@@ -1175,6 +1235,7 @@
/**
* Expert: set the merge scheduler used by this writer.
+ * @deprecated use {@link IndexWriterConfig#setMergeScheduler(MergeScheduler)} instead
*/
synchronized public void setMergeScheduler(MergeScheduler mergeScheduler) throws CorruptIndexException, IOException {
ensureOpen();
@@ -1188,12 +1249,16 @@
this.mergeScheduler = mergeScheduler;
if (infoStream != null)
message("setMergeScheduler " + mergeScheduler);
+ // Required so config.getMergeScheduler returns the right value. But this
+ // will go away together with the method in 4.0.
+ config.setMergeScheduler(mergeScheduler);
}
/**
- * Expert: returns the current MergePolicy in use by this
+ * Expert: returns the current MergeScheduler in use by this
* writer.
- * @see #setMergePolicy
+ * @see #setMergeScheduler(MergeScheduler)
+ * @deprecated use {@link IndexWriterConfig#getMergeScheduler()} instead
*/
public MergeScheduler getMergeScheduler() {
ensureOpen();
@@ -1219,6 +1284,7 @@
* LogByteSizeMergePolicy}) also allows you to set this
* limit by net size (in MB) of the segment, using {@link
* LogByteSizeMergePolicy#setMaxMergeMB}.
+ * @deprecated use {@link LogMergePolicy#setMaxMergeDocs(int)} directly.
*/
public void setMaxMergeDocs(int maxMergeDocs) {
getLogMergePolicy().setMaxMergeDocs(maxMergeDocs);
@@ -1234,6 +1300,7 @@
* Otherwise an IllegalArgumentException is thrown.
*
* @see #setMaxMergeDocs
+ * @deprecated use {@link LogMergePolicy#getMaxMergeDocs()} directly.
*/
public int getMaxMergeDocs() {
return getLogMergePolicy().getMaxMergeDocs();
@@ -1252,6 +1319,7 @@
* is your memory, but you should anticipate an OutOfMemoryError.
* By default, no more than {@link #DEFAULT_MAX_FIELD_LENGTH} terms
* will be indexed for a field.
+ * @deprecated use {@link IndexWriterConfig#setMaxFieldLength(int)} instead
*/
public void setMaxFieldLength(int maxFieldLength) {
ensureOpen();
@@ -1259,12 +1327,16 @@
docWriter.setMaxFieldLength(maxFieldLength);
if (infoStream != null)
message("setMaxFieldLength " + maxFieldLength);
+ // Required so config.getMaxFieldLength returns the right value. But this
+ // will go away together with the method in 4.0.
+ config.setMaxFieldLength(maxFieldLength);
}
/**
* Returns the maximum number of terms that will be
* indexed for a single field in a document.
* @see #setMaxFieldLength
+ * @deprecated use {@link IndexWriterConfig#getMaxFieldLength()} instead
*/
public int getMaxFieldLength() {
ensureOpen();
@@ -1289,6 +1361,7 @@
* enabled but smaller than 2, or it disables maxBufferedDocs
* when ramBufferSize is already disabled
* @see #setRAMBufferSizeMB
+ * @deprecated use {@link IndexWriterConfig#setMaxBufferedDocs(int)} instead.
*/
public void setMaxBufferedDocs(int maxBufferedDocs) {
ensureOpen();
@@ -1303,6 +1376,9 @@
pushMaxBufferedDocs();
if (infoStream != null)
message("setMaxBufferedDocs " + maxBufferedDocs);
+ // Required so config.getMaxBufferedDocs returns the right value. But this
+ // will go away together with the method in 4.0.
+ config.setMaxBufferedDocs(maxBufferedDocs);
}
/**
@@ -1329,6 +1405,7 @@
* Returns the number of buffered added documents that will
* trigger a flush if enabled.
* @see #setMaxBufferedDocs
+ * @deprecated use {@link IndexWriterConfig#getMaxBufferedDocs()} instead.
*/
public int getMaxBufferedDocs() {
ensureOpen();
@@ -1372,6 +1449,7 @@
* @throws IllegalArgumentException if ramBufferSize is
* enabled but non-positive, or it disables ramBufferSize
* when maxBufferedDocs is already disabled
+ * @deprecated use {@link IndexWriterConfig#setRAMBufferSizeMB(double)} instead.
*/
public void setRAMBufferSizeMB(double mb) {
if (mb > 2048.0) {
@@ -1386,10 +1464,14 @@
docWriter.setRAMBufferSizeMB(mb);
if (infoStream != null)
message("setRAMBufferSizeMB " + mb);
+ // Required so config.getRAMBufferSizeMB returns the right value. But this
+ // will go away together with the method in 4.0.
+ config.setRAMBufferSizeMB(mb);
}
/**
* Returns the value set by {@link #setRAMBufferSizeMB} if enabled.
+ * @deprecated use {@link IndexWriterConfig#getRAMBufferSizeMB()} instead.
*/
public double getRAMBufferSizeMB() {
return docWriter.getRAMBufferSizeMB();
@@ -1406,6 +1488,7 @@
* @throws IllegalArgumentException if maxBufferedDeleteTerms
* is enabled but smaller than 1
* @see #setRAMBufferSizeMB
+ * @deprecated use {@link IndexWriterConfig#setMaxBufferedDeleteTerms(int)} instead.
*/
public void setMaxBufferedDeleteTerms(int maxBufferedDeleteTerms) {
ensureOpen();
@@ -1416,12 +1499,16 @@
docWriter.setMaxBufferedDeleteTerms(maxBufferedDeleteTerms);
if (infoStream != null)
message("setMaxBufferedDeleteTerms " + maxBufferedDeleteTerms);
+ // Required so config.getMaxBufferedDeleteTerms returns the right value. But
+ // this will go away together with the method in 4.0.
+ config.setMaxBufferedDeleteTerms(maxBufferedDeleteTerms);
}
/**
* Returns the number of buffered deleted terms that will
* trigger a flush if enabled.
* @see #setMaxBufferedDeleteTerms
+ * @deprecated use {@link IndexWriterConfig#getMaxBufferedDeleteTerms()} instead
*/
public int getMaxBufferedDeleteTerms() {
ensureOpen();
@@ -1442,6 +1529,7 @@
* Otherwise an IllegalArgumentException is thrown.
*
* This must never be less than 2. The default value is 10.
+ * @deprecated use {@link LogMergePolicy#setMergeFactor(int)} directly.
*/
public void setMergeFactor(int mergeFactor) {
getLogMergePolicy().setMergeFactor(mergeFactor);
@@ -1458,6 +1546,7 @@
* Otherwise an IllegalArgumentException is thrown.
*
* @see #setMergeFactor
+ * @deprecated use {@link LogMergePolicy#getMergeFactor()} directly.
*/
public int getMergeFactor() {
return getLogMergePolicy().getMergeFactor();
@@ -1494,15 +1583,11 @@
}
private void messageState() {
- message("setInfoStream: dir=" + directory +
- " mergePolicy=" + mergePolicy +
- " mergeScheduler=" + mergeScheduler +
- " ramBufferSizeMB=" + docWriter.getRAMBufferSizeMB() +
- " maxBufferedDocs=" + docWriter.getMaxBufferedDocs() +
- " maxBuffereDeleteTerms=" + docWriter.getMaxBufferedDeleteTerms() +
- " maxFieldLength=" + maxFieldLength +
- " index=" + segString() +
- " version=" + Constants.LUCENE_VERSION);
+ message("\ndir=" + directory + "\n" +
+ "mergePolicy=" + mergePolicy + "\n" +
+ "index=" + segString() + "\n" +
+ "version=" + Constants.LUCENE_VERSION + "\n" +
+ config.toString());
}
/**
@@ -1522,15 +1607,20 @@
/**
* Sets the maximum time to wait for a write lock (in milliseconds) for this instance of IndexWriter.
* @see #setDefaultWriteLockTimeout to change the default value for all instances of IndexWriter.
+ * @deprecated use {@link IndexWriterConfig#setWriteLockTimeout(long)} instead
*/
public void setWriteLockTimeout(long writeLockTimeout) {
ensureOpen();
this.writeLockTimeout = writeLockTimeout;
+ // Required so config.getWriteLockTimeout returns the right value. But this
+ // will go away together with the method in 4.0.
+ config.setWriteLockTimeout(writeLockTimeout);
}
/**
* Returns allowed timeout when acquiring the write lock.
* @see #setWriteLockTimeout
+ * @deprecated use {@link IndexWriterConfig#getWriteLockTimeout()} instead
*/
public long getWriteLockTimeout() {
ensureOpen();
@@ -1540,18 +1630,20 @@
/**
* Sets the default (for any instance of IndexWriter) maximum time to wait for a write lock (in
* milliseconds).
+ * @deprecated use {@link IndexWriterConfig#setDefaultWriteLockTimeout(long)} instead
*/
public static void setDefaultWriteLockTimeout(long writeLockTimeout) {
- IndexWriter.WRITE_LOCK_TIMEOUT = writeLockTimeout;
+ IndexWriterConfig.setDefaultWriteLockTimeout(writeLockTimeout);
}
/**
* Returns default write lock timeout for newly
* instantiated IndexWriters.
* @see #setDefaultWriteLockTimeout
+ * @deprecated use {@link IndexWriterConfig#getDefaultWriteLockTimeout()} instead
*/
public static long getDefaultWriteLockTimeout() {
- return IndexWriter.WRITE_LOCK_TIMEOUT;
+ return IndexWriterConfig.getDefaultWriteLockTimeout();
}
/**
@@ -4785,9 +4877,13 @@
}
/**
- * Specifies maximum field length (in number of tokens/terms) in {@link IndexWriter} constructors.
- * {@link #setMaxFieldLength(int)} overrides the value set by
- * the constructor.
+ * Specifies maximum field length (in number of tokens/terms) in
+ * {@link IndexWriter} constructors. {@link #setMaxFieldLength(int)} overrides
+ * the value set by the constructor.
+ *
+ * @deprecated use {@link IndexWriterConfig} and pass
+ * {@link IndexWriterConfig#UNLIMITED_FIELD_LENGTH} or your own
+ * value.
*/
public static final class MaxFieldLength {
@@ -4854,14 +4950,25 @@
private IndexReaderWarmer mergedSegmentWarmer;
- /** Set the merged segment warmer. See {@link
- * IndexReaderWarmer}. */
+ /**
+ * Set the merged segment warmer. See {@link IndexReaderWarmer}.
+ *
+ * @deprecated use
+ * {@link IndexWriterConfig#setMergedSegmentWarmer(IndexReaderWarmer)}
+ * instead.
+ */
public void setMergedSegmentWarmer(IndexReaderWarmer warmer) {
mergedSegmentWarmer = warmer;
+ // Required so config.getMergedSegmentWarmer returns the right value. But
+ // this will go away together with the method in 4.0.
+ config.setMergedSegmentWarmer(mergedSegmentWarmer);
}
- /** Returns the current merged segment warmer. See {@link
- * IndexReaderWarmer}. */
+ /**
+ * Returns the current merged segment warmer. See {@link IndexReaderWarmer}.
+ *
+ * @deprecated use {@link IndexWriterConfig#getMergedSegmentWarmer()} instead.
+ */
public IndexReaderWarmer getMergedSegmentWarmer() {
return mergedSegmentWarmer;
}
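A minimal migration sketch for caller code, not part of the patch itself; the
class name, directory, and analyzer are arbitrary choices for illustration:

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

// Hypothetical example class, not part of this patch.
public class ConfigMigrationExample {
  public static void main(String[] args) throws Exception {
    Directory dir = new RAMDirectory();
    // Before (now deprecated):
    //   IndexWriter w = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.UNLIMITED);
    //   w.setRAMBufferSizeMB(32.0);
    // After: every parameter travels on the config, which the writer clones.
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_31,
        new WhitespaceAnalyzer(Version.LUCENE_31))
        .setOpenMode(OpenMode.CREATE)
        .setRAMBufferSizeMB(32.0));
    w.close();
  }
}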
Index: src/java/org/apache/lucene/index/IndexWriterConfig.java
===================================================================
--- src/java/org/apache/lucene/index/IndexWriterConfig.java (revision 0)
+++ src/java/org/apache/lucene/index/IndexWriterConfig.java (revision 0)
@@ -0,0 +1,518 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.index.DocumentsWriter.IndexingChain;
+import org.apache.lucene.index.IndexWriter.IndexReaderWarmer;
+import org.apache.lucene.search.Similarity;
+import org.apache.lucene.util.Version;
+
+/**
+ * Holds all the configuration of {@link IndexWriter}. This object is only used
+ * while constructing a new IndexWriter. Those settings cannot be changed
+ * afterwards, except by instantiating a new IndexWriter.
+ *
+ * All setter methods return {@link IndexWriterConfig} to allow chaining
+ * settings conveniently, so one can do:
+ *
+ *
+ * IndexWriterConfig conf = new IndexWriterConfig(matchVersion, analyzer);
+ * conf.setter1().setter2();
+ *
+ *
+ * @since 3.1
+ */
+public final class IndexWriterConfig implements Cloneable {
+
+ public static final int UNLIMITED_FIELD_LENGTH = Integer.MAX_VALUE;
+
+ /**
+ * Specifies the open mode for {@link IndexWriter}:
+ *
+ * {@link #CREATE} - creates a new index or overwrites an existing one.
+ * {@link #CREATE_OR_APPEND} - creates a new index if one does not exist,
+ * otherwise it opens the index and documents will be appended.
+ * {@link #APPEND} - opens an existing index.
+ *
+ */
+ public static enum OpenMode { CREATE, APPEND, CREATE_OR_APPEND }
+
+ /** Default value is 128. Change using {@link #setTermIndexInterval(int)}. */
+ public static final int DEFAULT_TERM_INDEX_INTERVAL = 128;
+
+ /** Denotes a flush trigger is disabled. */
+ public final static int DISABLE_AUTO_FLUSH = -1;
+
+ /** Disabled by default (because IndexWriter flushes by RAM usage by default). */
+ public final static int DEFAULT_MAX_BUFFERED_DELETE_TERMS = DISABLE_AUTO_FLUSH;
+
+ /** Disabled by default (because IndexWriter flushes by RAM usage by default). */
+ public final static int DEFAULT_MAX_BUFFERED_DOCS = DISABLE_AUTO_FLUSH;
+
+ /**
+ * Default value is 16 MB (which means flush when buffered docs consume
+ * approximately 16 MB RAM).
+ */
+ public final static double DEFAULT_RAM_BUFFER_SIZE_MB = 16.0;
+
+ /**
+ * Default value for the write lock timeout (1,000 ms).
+ *
+ * @see #setDefaultWriteLockTimeout(long)
+ */
+ public static long WRITE_LOCK_TIMEOUT = 1000;
+
+ /**
+ * Sets the default (for any instance) maximum time to wait for a write lock
+ * (in milliseconds).
+ */
+ public static void setDefaultWriteLockTimeout(long writeLockTimeout) {
+ WRITE_LOCK_TIMEOUT = writeLockTimeout;
+ }
+
+ /**
+ * Returns the default write lock timeout for newly instantiated
+ * IndexWriterConfigs.
+ *
+ * @see #setDefaultWriteLockTimeout(long)
+ */
+ public static long getDefaultWriteLockTimeout() {
+ return WRITE_LOCK_TIMEOUT;
+ }
+
+ private Analyzer analyzer;
+ private IndexDeletionPolicy delPolicy;
+ private IndexCommit commit;
+ private OpenMode openMode;
+ private int maxFieldLength;
+ private Similarity similarity;
+ private int termIndexInterval;
+ private MergeScheduler mergeScheduler;
+ private long writeLockTimeout;
+ private int maxBufferedDeleteTerms;
+ private double ramBufferSizeMB;
+ private int maxBufferedDocs;
+ private IndexingChain indexingChain;
+ private IndexReaderWarmer mergedSegmentWarmer;
+
+ // required for clone
+ private Version matchVersion;
+
+ /**
+ * Creates a new config with defaults that match the specified
+ * {@link Version}, using the provided {@link Analyzer}. {@link Version} is
+ * a placeholder for future changes. The default settings are relevant to 3.1
+ * and before. In the future, if different settings apply to different
+ * versions, they will be documented here.
+ */
+ public IndexWriterConfig(Version matchVersion, Analyzer analyzer) {
+ this.matchVersion = matchVersion;
+ this.analyzer = analyzer;
+ delPolicy = new KeepOnlyLastCommitDeletionPolicy();
+ commit = null;
+ openMode = OpenMode.CREATE_OR_APPEND;
+ maxFieldLength = UNLIMITED_FIELD_LENGTH;
+ similarity = Similarity.getDefault();
+ termIndexInterval = DEFAULT_TERM_INDEX_INTERVAL;
+ mergeScheduler = new ConcurrentMergeScheduler();
+ writeLockTimeout = WRITE_LOCK_TIMEOUT;
+ maxBufferedDeleteTerms = DEFAULT_MAX_BUFFERED_DELETE_TERMS;
+ ramBufferSizeMB = DEFAULT_RAM_BUFFER_SIZE_MB;
+ maxBufferedDocs = DEFAULT_MAX_BUFFERED_DOCS;
+ indexingChain = DocumentsWriter.defaultIndexingChain;
+ mergedSegmentWarmer = null;
+ }
+
+ @Override
+ public Object clone() {
+ // Shallow clone is the only thing that's possible, since parameters like
+ // analyzer, index commit etc. do not implement Cloneable.
+ try {
+ return super.clone();
+ } catch (CloneNotSupportedException e) {
+ // should not happen
+ throw new RuntimeException(e);
+ }
+ }
+
+ /** Returns the default analyzer to use for indexing documents. */
+ public Analyzer getAnalyzer() {
+ return analyzer;
+ }
+
+ /** Specifies the {@link OpenMode} of the index. */
+ public IndexWriterConfig setOpenMode(OpenMode openMode) {
+ this.openMode = openMode;
+ return this;
+ }
+
+ /** Returns the {@link OpenMode} set by {@link #setOpenMode(OpenMode)}. */
+ public OpenMode getOpenMode() {
+ return openMode;
+ }
+
+ /**
+ * Expert: allows an optional {@link IndexDeletionPolicy} implementation to be
+ * specified. You can use this to control when prior commits are deleted from
+ * the index. The default policy is {@link KeepOnlyLastCommitDeletionPolicy}
+ * which removes all prior commits as soon as a new commit is done (this
+ * matches behavior before 2.2). Creating your own policy can allow you to
+ * explicitly keep previous "point in time" commits alive in the index for
+ * some time, to allow readers to refresh to the new commit without having the
+ * old commit deleted out from under them. This is necessary on filesystems
+ * like NFS that do not support "delete on last close" semantics, which
+ * Lucene's "point in time" search normally relies on.
+ *
+ * NOTE: the deletion policy cannot be null. If null is
+ * passed, the deletion policy will be set to the default.
+ */
+ public IndexWriterConfig setIndexDeletionPolicy(IndexDeletionPolicy delPolicy) {
+ this.delPolicy = delPolicy == null ? new KeepOnlyLastCommitDeletionPolicy() : delPolicy;
+ return this;
+ }
+
+ /**
+ * Returns the {@link IndexDeletionPolicy} specified in
+ * {@link #setIndexDeletionPolicy(IndexDeletionPolicy)} or the default
+ * {@link KeepOnlyLastCommitDeletionPolicy}.
+ */
+ public IndexDeletionPolicy getIndexDeletionPolicy() {
+ return delPolicy;
+ }
+
+ /**
+ * The maximum number of terms that will be indexed for a single field in a
+ * document. This limits the amount of memory required for indexing, so that
+ * collections with very large files will not crash the indexing process by
+ * running out of memory. This setting refers to the number of running terms,
+ * not to the number of different terms.
+ *
+ * NOTE: this silently truncates large documents, excluding from the
+ * index all terms that occur further in the document. If you know your source
+ * documents are large, be sure to set this value high enough to accommodate
+ * the expected size. If you set it to {@link #UNLIMITED_FIELD_LENGTH}, then
+ * the only limit is your memory, but you should anticipate an
+ * OutOfMemoryError.
+ *
+ * By default it is set to {@link #UNLIMITED_FIELD_LENGTH}.
+ */
+ public IndexWriterConfig setMaxFieldLength(int maxFieldLength) {
+ this.maxFieldLength = maxFieldLength;
+ return this;
+ }
+
+ /**
+ * Returns the maximum number of terms that will be indexed for a single field
+ * in a document.
+ *
+ * @see #setMaxFieldLength(int)
+ */
+ public int getMaxFieldLength() {
+ return maxFieldLength;
+ }
+
+ /**
+ * Expert: allows opening a certain commit point. The default is null, which
+ * opens the latest commit point.
+ */
+ public IndexWriterConfig setIndexCommit(IndexCommit commit) {
+ this.commit = commit;
+ return this;
+ }
+
+ /**
+ * Returns the {@link IndexCommit} as specified in
+ * {@link #setIndexCommit(IndexCommit)} or the default null,
+ * which specifies opening the latest index commit point.
+ */
+ public IndexCommit getIndexCommit() {
+ return commit;
+ }
+
+ /**
+ * Expert: set the {@link Similarity} implementation used by this IndexWriter.
+ *
+ * NOTE: the similarity cannot be null. If null is passed,
+ * the similarity will be set to the default.
+ *
+ * @see Similarity#setDefault(Similarity)
+ */
+ public IndexWriterConfig setSimilarity(Similarity similarity) {
+ this.similarity = similarity == null ? Similarity.getDefault() : similarity;
+ return this;
+ }
+
+ /**
+ * Expert: returns the {@link Similarity} implementation used by this
+ * IndexWriter. This defaults to the current value of
+ * {@link Similarity#getDefault()}.
+ */
+ public Similarity getSimilarity() {
+ return similarity;
+ }
+
+ /**
+ * Expert: set the interval between indexed terms. Large values cause less
+ * memory to be used by IndexReader, but slow random-access to terms. Small
+ * values cause more memory to be used by an IndexReader, and speed
+ * random-access to terms.
+ *
+ * This parameter determines the amount of computation required per query
+ * term, regardless of the number of documents that contain that term. In
+ * particular, it is the maximum number of other terms that must be scanned
+ * before a term is located and its frequency and position information may be
+ * processed. In a large index with user-entered query terms, query processing
+ * time is likely to be dominated not by term lookup but rather by the
+ * processing of frequency and positional data. In a small index or when many
+ * uncommon query terms are generated (e.g., by wildcard queries) term lookup
+ * may become a dominant cost.
+ *
+ * In particular, numUniqueTerms/interval terms are read into
+ * memory by an IndexReader, and, on average, interval/2 terms
+ * must be scanned for each random term access.
+ *
+ * @see #DEFAULT_TERM_INDEX_INTERVAL
+ */
+ public IndexWriterConfig setTermIndexInterval(int interval) {
+ this.termIndexInterval = interval;
+ return this;
+ }
+
+ /**
+ * Returns the interval between indexed terms.
+ *
+ * @see #setTermIndexInterval(int)
+ */
+ public int getTermIndexInterval() {
+ return termIndexInterval;
+ }
+
+ /**
+ * Expert: sets the merge scheduler used by this writer. The default is
+ * {@link ConcurrentMergeScheduler}.
+ *
+ * NOTE: the merge scheduler cannot be null. If null is
+ * passed, the merge scheduler will be set to the default.
+ */
+ public IndexWriterConfig setMergeScheduler(MergeScheduler mergeScheduler) {
+ this.mergeScheduler = mergeScheduler == null ? new ConcurrentMergeScheduler() : mergeScheduler;
+ return this;
+ }
+
+ /**
+ * Returns the {@link MergeScheduler} that was set by
+ * {@link #setMergeScheduler(MergeScheduler)}
+ */
+ public MergeScheduler getMergeScheduler() {
+ return mergeScheduler;
+ }
+
+ /**
+ * Sets the maximum time to wait for a write lock (in milliseconds) for this
+ * instance. You can change the default value for all instances by calling
+ * {@link #setDefaultWriteLockTimeout(long)}.
+ */
+ public IndexWriterConfig setWriteLockTimeout(long writeLockTimeout) {
+ this.writeLockTimeout = writeLockTimeout;
+ return this;
+ }
+
+ /**
+ * Returns allowed timeout when acquiring the write lock.
+ *
+ * @see #setWriteLockTimeout(long)
+ */
+ public long getWriteLockTimeout() {
+ return writeLockTimeout;
+ }
+
+ /**
+ * Determines the minimal number of delete terms required before the buffered
+ * in-memory delete terms are applied and flushed. If there are documents
+ * buffered in memory at the time, they are merged and a new segment is
+ * created.
+ *
+ * Disabled by default (writer flushes by RAM usage).
+ *
+ * @throws IllegalArgumentException if maxBufferedDeleteTerms
+ * is enabled but smaller than 1
+ * @see #setRAMBufferSizeMB
+ */
+ public IndexWriterConfig setMaxBufferedDeleteTerms(int maxBufferedDeleteTerms) {
+ if (maxBufferedDeleteTerms != DISABLE_AUTO_FLUSH
+ && maxBufferedDeleteTerms < 1)
+ throw new IllegalArgumentException(
+ "maxBufferedDeleteTerms must at least be 1 when enabled");
+ this.maxBufferedDeleteTerms = maxBufferedDeleteTerms;
+ return this;
+ }
+
+ /**
+ * Returns the number of buffered deleted terms that will trigger a flush if
+ * enabled.
+ *
+ * @see #setMaxBufferedDeleteTerms(int)
+ */
+ public int getMaxBufferedDeleteTerms() {
+ return maxBufferedDeleteTerms;
+ }
+
+ /**
+ * Determines the amount of RAM that may be used for buffering added documents
+ * and deletions before they are flushed to the Directory. Generally for
+ * faster indexing performance it's best to flush by RAM usage instead of
+ * document count and use as large a RAM buffer as you can.
+ *
+ *
+ * When this is set, the writer will flush whenever buffered documents and
+ * deletions use this much RAM. Pass in {@link #DISABLE_AUTO_FLUSH} to prevent
+ * triggering a flush due to RAM usage. Note that if flushing by document
+ * count is also enabled, then the flush will be triggered by whichever comes
+ * first.
+ *
+ *
+ * NOTE: the accounting of RAM usage for pending deletions is only
+ * approximate. Specifically, if you delete by Query, Lucene currently has no
+ * way to measure the RAM usage of individual Queries so the accounting will
+ * under-estimate and you should compensate by either calling commit()
+ * periodically yourself, or by using {@link #setMaxBufferedDeleteTerms(int)}
+ * to flush by count instead of RAM usage (each buffered delete Query counts
+ * as one).
+ *
+ *
+ * NOTE: because IndexWriter uses ints when managing its
+ * internal storage, the absolute maximum value for this setting is somewhat
+ * less than 2048 MB. The precise limit depends on various factors, such as
+ * how large your documents are, how many fields have norms, etc., so it's
+ * best to set this value comfortably under 2048.
+ *
+ *
+ * The default value is {@link #DEFAULT_RAM_BUFFER_SIZE_MB}.
+ *
+ * @throws IllegalArgumentException
+ * if ramBufferSize is enabled but non-positive, or it disables
+ * ramBufferSize when maxBufferedDocs is already disabled
+ */
+ public IndexWriterConfig setRAMBufferSizeMB(double ramBufferSizeMB) {
+ if (ramBufferSizeMB > 2048.0) {
+ throw new IllegalArgumentException("ramBufferSize " + ramBufferSizeMB
+ + " is too large; should be comfortably less than 2048");
+ }
+ if (ramBufferSizeMB != DISABLE_AUTO_FLUSH && ramBufferSizeMB <= 0.0)
+ throw new IllegalArgumentException(
+ "ramBufferSize should be > 0.0 MB when enabled");
+ if (ramBufferSizeMB == DISABLE_AUTO_FLUSH && maxBufferedDocs == DISABLE_AUTO_FLUSH)
+ throw new IllegalArgumentException(
+ "at least one of ramBufferSize and maxBufferedDocs must be enabled");
+ this.ramBufferSizeMB = ramBufferSizeMB;
+ return this;
+ }
+
+ /** Returns the value set by {@link #setRAMBufferSizeMB(double)} if enabled. */
+ public double getRAMBufferSizeMB() {
+ return ramBufferSizeMB;
+ }
+
+ /**
+ * Determines the minimal number of documents required before the buffered
+ * in-memory documents are flushed as a new Segment. Large values generally
+ * give faster indexing.
+ *
+ *
+ * When this is set, the writer will flush every maxBufferedDocs added
+ * documents. Pass in {@link #DISABLE_AUTO_FLUSH} to prevent triggering a
+ * flush due to number of buffered documents. Note that if flushing by RAM
+ * usage is also enabled, then the flush will be triggered by whichever comes
+ * first.
+ *
+ *
+ * Disabled by default (writer flushes by RAM usage).
+ *
+ * @see #setRAMBufferSizeMB(double)
+ *
+ * @throws IllegalArgumentException
+ * if maxBufferedDocs is enabled but smaller than 2, or it disables
+ * maxBufferedDocs when ramBufferSize is already disabled
+ */
+ public IndexWriterConfig setMaxBufferedDocs(int maxBufferedDocs) {
+ if (maxBufferedDocs != DISABLE_AUTO_FLUSH && maxBufferedDocs < 2)
+ throw new IllegalArgumentException(
+ "maxBufferedDocs must at least be 2 when enabled");
+ if (maxBufferedDocs == DISABLE_AUTO_FLUSH
+ && ramBufferSizeMB == DISABLE_AUTO_FLUSH)
+ throw new IllegalArgumentException(
+ "at least one of ramBufferSize and maxBufferedDocs must be enabled");
+ this.maxBufferedDocs = maxBufferedDocs;
+ return this;
+ }
+
+ /**
+ * Returns the number of buffered added documents that will trigger a flush if
+ * enabled.
+ *
+ * @see #setMaxBufferedDocs(int)
+ */
+ public int getMaxBufferedDocs() {
+ return maxBufferedDocs;
+ }
+
+ /** Set the merged segment warmer. See {@link IndexReaderWarmer}. */
+ public IndexWriterConfig setMergedSegmentWarmer(IndexReaderWarmer mergeSegmentWarmer) {
+ this.mergedSegmentWarmer = mergeSegmentWarmer;
+ return this;
+ }
+
+ /** Returns the current merged segment warmer. See {@link IndexReaderWarmer}. */
+ public IndexReaderWarmer getMergedSegmentWarmer() {
+ return mergedSegmentWarmer;
+ }
+
+ /** Expert: sets the {@link DocConsumer} chain to be used to process documents. */
+ IndexWriterConfig setIndexingChain(IndexingChain indexingChain) {
+ this.indexingChain = indexingChain == null ? DocumentsWriter.defaultIndexingChain : indexingChain;
+ return this;
+ }
+
+ /** Returns the indexing chain set by {@link #setIndexingChain(IndexingChain)}. */
+ IndexingChain getIndexingChain() {
+ return indexingChain;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder();
+ sb.append("matchVersion=").append(matchVersion).append("\n");
+ sb.append("analyzer=").append(analyzer.getClass().getName()).append("\n");
+ sb.append("delPolicy=").append(delPolicy.getClass().getName()).append("\n");
+ sb.append("commit=").append(commit == null ? "null" : commit.getClass().getName()).append("\n");
+ sb.append("openMode=").append(openMode).append("\n");
+ sb.append("maxFieldLength=").append(maxFieldLength).append("\n");
+ sb.append("similarity=").append(similarity.getClass().getName()).append("\n");
+ sb.append("termIndexInterval=").append(termIndexInterval).append("\n");
+ sb.append("mergeScheduler=").append(mergeScheduler.getClass().getName()).append("\n");
+ sb.append("default WRITE_LOCK_TIMEOUT=").append(WRITE_LOCK_TIMEOUT).append("\n");
+ sb.append("writeLockTimeout=").append(writeLockTimeout).append("\n");
+ sb.append("maxBufferedDeleteTerms=").append(maxBufferedDeleteTerms).append("\n");
+ sb.append("ramBufferSizeMB=").append(ramBufferSizeMB).append("\n");
+ sb.append("maxBufferedDocs=").append(maxBufferedDocs).append("\n");
+ sb.append("mergedSegmentWarmer=").append(mergedSegmentWarmer).append("\n");
+ return sb.toString();
+ }
+}
Property changes on: src\java\org\apache\lucene\index\IndexWriterConfig.java
___________________________________________________________________
Added: svn:keywords
+ Date Author Id Revision HeadURL
Added: svn:eol-style
+ native
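To make the clone semantics documented above concrete, here is a small sketch
(not part of the patch; the class name is hypothetical): mutating the config
after construction, or mutating the instance returned by getConfig(), does not
affect the live writer.

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

// Hypothetical example class, not part of this patch.
public class ConfigCloneExample {
  public static void main(String[] args) throws Exception {
    IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_31,
        new WhitespaceAnalyzer(Version.LUCENE_31)).setMaxBufferedDocs(2);
    IndexWriter writer = new IndexWriter(new RAMDirectory(), conf);
    conf.setMaxBufferedDocs(1000); // too late: the writer cloned the config
    System.out.println(writer.getConfig().getMaxBufferedDocs()); // prints 2
    writer.close();
  }
}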
Index: src/java/org/apache/lucene/index/SegmentMerger.java
===================================================================
--- src/java/org/apache/lucene/index/SegmentMerger.java (revision 921633)
+++ src/java/org/apache/lucene/index/SegmentMerger.java (working copy)
@@ -48,7 +48,7 @@
private Directory directory;
private String segment;
- private int termIndexInterval = IndexWriter.DEFAULT_TERM_INDEX_INTERVAL;
+ private int termIndexInterval = IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL;
private List readers = new ArrayList();
private FieldInfos fieldInfos;
@@ -96,7 +96,7 @@
}
};
}
- termIndexInterval = writer.getTermIndexInterval();
+ termIndexInterval = writer.getConfig().getTermIndexInterval();
}
boolean hasProx() {
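SegmentMerger now obtains the term index interval through writer.getConfig(),
and the same pattern replaces the other deprecated per-writer getters. A brief
sketch of querying a parameter this way (not part of the patch; the class name
is hypothetical):

import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

// Hypothetical example class, not part of this patch.
public class ConfigQueryExample {
  public static void main(String[] args) throws Exception {
    IndexWriter writer = new IndexWriter(new RAMDirectory(),
        new IndexWriterConfig(Version.LUCENE_31,
            new WhitespaceAnalyzer(Version.LUCENE_31)).setTermIndexInterval(64));
    // Default would be IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL (128).
    System.out.println("termIndexInterval=" + writer.getConfig().getTermIndexInterval());
    writer.close();
  }
}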
Index: src/test/org/apache/lucene/TestDemo.java
===================================================================
--- src/test/org/apache/lucene/TestDemo.java (revision 921633)
+++ src/test/org/apache/lucene/TestDemo.java (working copy)
@@ -24,6 +24,7 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.IndexSearcher;
@@ -49,8 +50,9 @@
Directory directory = new RAMDirectory();
// To store an index on disk, use this instead:
//Directory directory = FSDirectory.open("/tmp/testindex");
- IndexWriter iwriter = new IndexWriter(directory, analyzer, true,
- new IndexWriter.MaxFieldLength(25000));
+ IndexWriter iwriter = new IndexWriter(directory, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer).setMaxFieldLength(25000));
+
Document doc = new Document();
String text = "This is the text to be indexed.";
doc.add(new Field("fieldname", text, Field.Store.YES,
Index: src/test/org/apache/lucene/TestMergeSchedulerExternal.java
===================================================================
--- src/test/org/apache/lucene/TestMergeSchedulerExternal.java (revision 921633)
+++ src/test/org/apache/lucene/TestMergeSchedulerExternal.java (working copy)
@@ -18,11 +18,12 @@
*/
import java.io.IOException;
import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.ConcurrentMergeScheduler;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -86,15 +87,14 @@
Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
doc.add(idField);
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- MyMergeScheduler ms = new MyMergeScheduler();
- writer.setMergeScheduler(ms);
- writer.setMaxBufferedDocs(2);
- writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMergeScheduler(new MyMergeScheduler())
+ .setMaxBufferedDocs(2).setRAMBufferSizeMB(
+ IndexWriterConfig.DISABLE_AUTO_FLUSH));
for(int i=0;i<20;i++)
writer.addDocument(doc);
- ms.sync();
+ ((MyMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
writer.close();
assertTrue(mergeThreadCreated);
Index: src/test/org/apache/lucene/TestSearch.java
===================================================================
--- src/test/org/apache/lucene/TestSearch.java (revision 921633)
+++ src/test/org/apache/lucene/TestSearch.java (working copy)
@@ -70,15 +70,15 @@
private void doTestSearch(PrintWriter out, boolean useCompoundFile)
- throws Exception
- {
+ throws Exception {
Directory directory = new RAMDirectory();
Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
- IndexWriter writer = new IndexWriter(directory, analyzer, true,
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer));
+ LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
+ lmp.setUseCompoundFile(useCompoundFile);
+ lmp.setUseCompoundDocStore(useCompoundFile);
- writer.setUseCompoundFile(useCompoundFile);
-
String[] docs = {
"a b c d e",
"a b c d e a b c d e",
Index: src/test/org/apache/lucene/TestSearchForDuplicates.java
===================================================================
--- src/test/org/apache/lucene/TestSearchForDuplicates.java (revision 921633)
+++ src/test/org/apache/lucene/TestSearchForDuplicates.java (working copy)
@@ -78,11 +78,12 @@
private void doTest(PrintWriter out, boolean useCompoundFiles) throws Exception {
Directory directory = new RAMDirectory();
Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
- IndexWriter writer = new IndexWriter(directory, analyzer, true,
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer));
+ LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
+ lmp.setUseCompoundFile(useCompoundFiles);
+ lmp.setUseCompoundDocStore(useCompoundFiles);
- writer.setUseCompoundFile(useCompoundFiles);
-
final int MAX_DOCS = 225;
for (int j = 0; j < MAX_DOCS; j++) {
Index: src/test/org/apache/lucene/TestSnapshotDeletionPolicy.java
===================================================================
--- src/test/org/apache/lucene/TestSnapshotDeletionPolicy.java (revision 921633)
+++ src/test/org/apache/lucene/TestSnapshotDeletionPolicy.java (working copy)
@@ -31,6 +31,7 @@
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.TestIndexWriter;
@@ -67,9 +68,10 @@
Directory dir = new MockRAMDirectory();
SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
- // Force frequent flushes
- writer.setMaxBufferedDocs(2);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT,
+ new StandardAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(dp)
+ .setMaxBufferedDocs(2));
Document doc = new Document();
doc.add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
for(int i=0;i<7;i++) {
@@ -83,7 +85,8 @@
writer.close();
copyFiles(dir, cp);
- writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT,
+ new StandardAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(dp));
copyFiles(dir, cp);
for(int i=0;i<7;i++) {
writer.addDocument(doc);
@@ -95,7 +98,8 @@
writer.close();
copyFiles(dir, cp);
dp.release();
- writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT,
+ new StandardAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(dp));
writer.close();
try {
copyFiles(dir, cp);
@@ -111,11 +115,11 @@
final long stopTime = System.currentTimeMillis() + 1000;
SnapshotDeletionPolicy dp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
- final IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), dp, IndexWriter.MaxFieldLength.UNLIMITED);
+ final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT,
+ new StandardAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(dp)
+ .setMaxBufferedDocs(2));
- // Force frequent flushes
- writer.setMaxBufferedDocs(2);
-
final Thread t = new Thread() {
@Override
public void run() {
Index: src/test/org/apache/lucene/analysis/TestCachingTokenFilter.java
===================================================================
--- src/test/org/apache/lucene/analysis/TestCachingTokenFilter.java (revision 921633)
+++ src/test/org/apache/lucene/analysis/TestCachingTokenFilter.java (working copy)
@@ -27,6 +27,7 @@
import org.apache.lucene.document.Field.TermVector;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermPositions;
import org.apache.lucene.store.Directory;
@@ -37,7 +38,8 @@
public void testCaching() throws IOException {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
TokenStream stream = new TokenStream() {
private int index = 0;
Index: src/test/org/apache/lucene/analysis/TestKeywordAnalyzer.java
===================================================================
--- src/test/org/apache/lucene/analysis/TestKeywordAnalyzer.java (revision 921633)
+++ src/test/org/apache/lucene/analysis/TestKeywordAnalyzer.java (working copy)
@@ -24,6 +24,7 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.queryParser.QueryParser;
@@ -41,9 +42,9 @@
protected void setUp() throws Exception {
super.setUp();
directory = new RAMDirectory();
- IndexWriter writer = new IndexWriter(directory,
- new SimpleAnalyzer(TEST_VERSION_CURRENT),
- true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(
+ TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.NOT_ANALYZED));
@@ -70,7 +71,7 @@
public void testMutipleDocument() throws Exception {
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir,new KeywordAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new KeywordAnalyzer()));
Document doc = new Document();
doc.add(new Field("partnum", "Q36", Field.Store.YES, Field.Index.ANALYZED));
writer.addDocument(doc);
Index: src/test/org/apache/lucene/collation/CollationTestBase.java
===================================================================
--- src/test/org/apache/lucene/collation/CollationTestBase.java (revision 921633)
+++ src/test/org/apache/lucene/collation/CollationTestBase.java (working copy)
@@ -23,6 +23,7 @@
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.IndexSearcher;
@@ -69,8 +70,8 @@
String firstEnd, String secondBeg,
String secondEnd) throws Exception {
RAMDirectory ramDir = new RAMDirectory();
- IndexWriter writer = new IndexWriter
- (ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer));
Document doc = new Document();
doc.add(new Field("content", "\u0633\u0627\u0628",
Field.Store.YES, Field.Index.ANALYZED));
@@ -101,8 +102,8 @@
String firstEnd, String secondBeg,
String secondEnd) throws Exception {
RAMDirectory ramDir = new RAMDirectory();
- IndexWriter writer = new IndexWriter
- (ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(ramDir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer));
Document doc = new Document();
// Unicode order would include U+0633 in [ U+062F - U+0698 ], but Farsi
@@ -125,13 +126,12 @@
searcher.close();
}
- public void testFarsiTermRangeQuery
- (Analyzer analyzer, String firstBeg, String firstEnd,
- String secondBeg, String secondEnd) throws Exception {
+ public void testFarsiTermRangeQuery(Analyzer analyzer, String firstBeg,
+ String firstEnd, String secondBeg, String secondEnd) throws Exception {
RAMDirectory farsiIndex = new RAMDirectory();
- IndexWriter writer = new IndexWriter
- (farsiIndex, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(farsiIndex, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer));
Document doc = new Document();
doc.add(new Field("content", "\u0633\u0627\u0628",
Field.Store.YES, Field.Index.ANALYZED));
@@ -178,8 +178,8 @@
analyzer.addAnalyzer("France", franceAnalyzer);
analyzer.addAnalyzer("Sweden", swedenAnalyzer);
analyzer.addAnalyzer("Denmark", denmarkAnalyzer);
- IndexWriter writer = new IndexWriter
- (indexStore, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(indexStore, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer));
// document data:
// the tracer field is used to determine which document was hit
Index: src/test/org/apache/lucene/document/TestBinaryDocument.java
===================================================================
--- src/test/org/apache/lucene/document/TestBinaryDocument.java (revision 921633)
+++ src/test/org/apache/lucene/document/TestBinaryDocument.java (working copy)
@@ -5,6 +5,7 @@
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.MockRAMDirectory;
/**
@@ -27,8 +28,7 @@
/**
* Tests {@link Document} class.
*/
-public class TestBinaryDocument extends LuceneTestCase
-{
+public class TestBinaryDocument extends LuceneTestCase {
String binaryValStored = "this text will be stored as a byte array in the index";
String binaryValCompressed = "this text will be also stored and compressed as a byte array in the index";
@@ -58,7 +58,8 @@
/** add the doc to a ram index */
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
writer.addDocument(doc);
writer.close();
@@ -83,9 +84,7 @@
dir.close();
}
- public void testCompressionTools()
- throws Exception
- {
+ public void testCompressionTools() throws Exception {
Fieldable binaryFldCompressed = new Field("binaryCompressed", CompressionTools.compress(binaryValCompressed.getBytes()));
Fieldable stringFldCompressed = new Field("stringCompressed", CompressionTools.compressString(binaryValCompressed));
@@ -96,7 +95,8 @@
/** add the doc to a ram index */
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
writer.addDocument(doc);
writer.close();
Index: src/test/org/apache/lucene/document/TestDocument.java
===================================================================
--- src/test/org/apache/lucene/document/TestDocument.java (revision 921633)
+++ src/test/org/apache/lucene/document/TestDocument.java (working copy)
@@ -2,6 +2,7 @@
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
@@ -151,10 +152,11 @@
*
* @throws Exception on error
*/
- public void testGetValuesForIndexedDocument() throws Exception
- {
+ public void testGetValuesForIndexedDocument() throws Exception {
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(
+ TEST_VERSION_CURRENT)));
writer.addDocument(makeDocumentWithFields());
writer.close();
@@ -225,7 +227,9 @@
doc.add(new Field("keyword", "test", Field.Store.YES, Field.Index.NOT_ANALYZED));
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(
+ TEST_VERSION_CURRENT)));
writer.addDocument(doc);
field.setValue("id2");
writer.addDocument(doc);
Index: src/test/org/apache/lucene/index/DocHelper.java
===================================================================
--- src/test/org/apache/lucene/index/DocHelper.java (revision 921633)
+++ src/test/org/apache/lucene/index/DocHelper.java (working copy)
@@ -232,10 +232,9 @@
* @param doc
* @throws IOException
*/
- public static SegmentInfo writeDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc) throws IOException
- {
- IndexWriter writer = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
- writer.setSimilarity(similarity);
+ public static SegmentInfo writeDoc(Directory dir, Analyzer analyzer, Similarity similarity, Document doc) throws IOException {
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer).setSimilarity(similarity));
//writer.setUseCompoundFile(false);
writer.addDocument(doc);
writer.commit();
Index: src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java
===================================================================
--- src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestAddIndexesNoOptimize.java (working copy)
@@ -23,6 +23,7 @@
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.MockRAMDirectory;
@@ -39,27 +40,30 @@
IndexWriter writer = null;
- writer = newWriter(dir, true);
+ writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT,
+ new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.CREATE));
// add 100 documents
addDocs(writer, 100);
assertEquals(100, writer.maxDoc());
writer.close();
- writer = newWriter(aux, true);
- writer.setUseCompoundFile(false); // use one without a compound file
+ writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE));
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
// add 40 documents in separate files
addDocs(writer, 40);
assertEquals(40, writer.maxDoc());
writer.close();
- writer = newWriter(aux2, true);
+ writer = newWriter(aux2, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE));
// add 40 documents in compound files
addDocs2(writer, 50);
assertEquals(50, writer.maxDoc());
writer.close();
// test doc count before segments are merged
- writer = newWriter(dir, false);
+ writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
assertEquals(100, writer.maxDoc());
writer.addIndexesNoOptimize(new Directory[] { aux, aux2 });
assertEquals(190, writer.maxDoc());
@@ -73,14 +77,14 @@
// now add another set in.
Directory aux3 = new RAMDirectory();
- writer = newWriter(aux3, true);
+ writer = newWriter(aux3, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
// add 40 documents
addDocs(writer, 40);
assertEquals(40, writer.maxDoc());
writer.close();
// test doc count before segments are merged/index is optimized
- writer = newWriter(dir, false);
+ writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
assertEquals(190, writer.maxDoc());
writer.addIndexesNoOptimize(new Directory[] { aux3 });
assertEquals(230, writer.maxDoc());
@@ -94,7 +98,7 @@
verifyTermDocs(dir, new Term("content", "bbb"), 50);
// now optimize it.
- writer = newWriter(dir, false);
+ writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@@ -107,11 +111,11 @@
// now add a single document
Directory aux4 = new RAMDirectory();
- writer = newWriter(aux4, true);
+ writer = newWriter(aux4, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
addDocs2(writer, 1);
writer.close();
- writer = newWriter(dir, false);
+ writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
assertEquals(230, writer.maxDoc());
writer.addIndexesNoOptimize(new Directory[] { aux4 });
assertEquals(231, writer.maxDoc());
@@ -129,7 +133,7 @@
Directory aux = new RAMDirectory();
setUpDirs(dir, aux);
- IndexWriter writer = newWriter(dir, false);
+ IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
writer.addIndexesNoOptimize(new Directory[] {aux});
// Adds 10 docs, then replaces them with another 10
@@ -166,7 +170,7 @@
Directory aux = new RAMDirectory();
setUpDirs(dir, aux);
- IndexWriter writer = newWriter(dir, false);
+ IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
// Adds 10 docs, then replaces them with another 10
// docs, so 10 pending deletes:
@@ -205,7 +209,7 @@
Directory aux = new RAMDirectory();
setUpDirs(dir, aux);
- IndexWriter writer = newWriter(dir, false);
+ IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
// Adds 10 docs, then replaces them with another 10
// docs, so 10 pending deletes:
@@ -246,25 +250,25 @@
IndexWriter writer = null;
- writer = newWriter(dir, true);
+ writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
// add 100 documents
addDocs(writer, 100);
assertEquals(100, writer.maxDoc());
writer.close();
- writer = newWriter(aux, true);
- writer.setUseCompoundFile(false); // use one without a compound file
- writer.setMaxBufferedDocs(1000);
+ writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
// add 140 documents in separate files
addDocs(writer, 40);
writer.close();
- writer = newWriter(aux, true);
- writer.setUseCompoundFile(false); // use one without a compound file
- writer.setMaxBufferedDocs(1000);
+ writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
addDocs(writer, 100);
writer.close();
- writer = newWriter(dir, false);
+ writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
try {
// cannot add self
writer.addIndexesNoOptimize(new Directory[] { aux, dir });
@@ -290,9 +294,10 @@
setUpDirs(dir, aux);
- IndexWriter writer = newWriter(dir, false);
- writer.setMaxBufferedDocs(10);
- writer.setMergeFactor(4);
+ IndexWriter writer = newWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
addDocs(writer, 10);
writer.addIndexesNoOptimize(new Directory[] { aux });
@@ -314,9 +319,8 @@
setUpDirs(dir, aux);
- IndexWriter writer = newWriter(dir, false);
- writer.setMaxBufferedDocs(9);
- writer.setMergeFactor(4);
+ IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(9));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
addDocs(writer, 2);
writer.addIndexesNoOptimize(new Directory[] { aux });
@@ -338,9 +342,10 @@
setUpDirs(dir, aux);
- IndexWriter writer = newWriter(dir, false);
- writer.setMaxBufferedDocs(10);
- writer.setMergeFactor(4);
+ IndexWriter writer = newWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
writer.addIndexesNoOptimize(new Directory[] { aux, new RAMDirectory(aux) });
assertEquals(1060, writer.maxDoc());
@@ -367,9 +372,10 @@
assertEquals(10, reader.numDocs());
reader.close();
- IndexWriter writer = newWriter(dir, false);
- writer.setMaxBufferedDocs(4);
- writer.setMergeFactor(4);
+ IndexWriter writer = newWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(4));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
writer.addIndexesNoOptimize(new Directory[] { aux, new RAMDirectory(aux) });
assertEquals(1020, writer.maxDoc());
@@ -390,9 +396,10 @@
setUpDirs(dir, aux);
- IndexWriter writer = newWriter(aux2, true);
- writer.setMaxBufferedDocs(100);
- writer.setMergeFactor(10);
+ IndexWriter writer = newWriter(aux2, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(100));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
writer.addIndexesNoOptimize(new Directory[] { aux });
assertEquals(30, writer.maxDoc());
assertEquals(3, writer.getSegmentCount());
@@ -412,9 +419,9 @@
assertEquals(22, reader.numDocs());
reader.close();
- writer = newWriter(dir, false);
- writer.setMaxBufferedDocs(6);
- writer.setMergeFactor(4);
+ writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(6));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
writer.addIndexesNoOptimize(new Directory[] { aux, aux2 });
assertEquals(1025, writer.maxDoc());
@@ -425,9 +432,9 @@
verifyNumDocs(dir, 1025);
}
- private IndexWriter newWriter(Directory dir, boolean create)
+ private IndexWriter newWriter(Directory dir, IndexWriterConfig conf)
throws IOException {
- final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), create, IndexWriter.MaxFieldLength.UNLIMITED);
+ final IndexWriter writer = new IndexWriter(dir, conf);
writer.setMergePolicy(new LogDocMergePolicy(writer));
return writer;
}
@@ -471,26 +478,25 @@
private void setUpDirs(Directory dir, Directory aux) throws IOException {
IndexWriter writer = null;
- writer = newWriter(dir, true);
- writer.setMaxBufferedDocs(1000);
+ writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(1000));
// add 1000 documents in 1 segment
addDocs(writer, 1000);
assertEquals(1000, writer.maxDoc());
assertEquals(1, writer.getSegmentCount());
writer.close();
- writer = newWriter(aux, true);
- writer.setUseCompoundFile(false); // use one without a compound file
- writer.setMaxBufferedDocs(100);
- writer.setMergeFactor(10);
+ writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(100));
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
// add 30 documents in 3 segments
for (int i = 0; i < 3; i++) {
addDocs(writer, 10);
writer.close();
- writer = newWriter(aux, false);
- writer.setUseCompoundFile(false); // use one without a compound file
- writer.setMaxBufferedDocs(100);
- writer.setMergeFactor(10);
+ writer = newWriter(aux, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(100));
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false); // use one without a compound file
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false); // use one without a compound file
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
}
assertEquals(30, writer.maxDoc());
assertEquals(3, writer.getSegmentCount());
@@ -501,18 +507,19 @@
public void testHangOnClose() throws IOException {
Directory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMergePolicy(new LogByteSizeMergePolicy(writer));
- writer.setMaxBufferedDocs(5);
- writer.setUseCompoundFile(false);
- writer.setMergeFactor(100);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(5));
+ LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
+ lmp.setUseCompoundFile(false);
+ lmp.setUseCompoundDocStore(false);
+ lmp.setMergeFactor(100);
+ writer.setMergePolicy(lmp);
Document doc = new Document();
doc.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
for(int i=0;i<60;i++)
writer.addDocument(doc);
- writer.setMaxBufferedDocs(200);
+
Document doc2 = new Document();
doc2.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
Field.Index.NO));
@@ -527,13 +534,13 @@
writer.close();
Directory dir2 = new MockRAMDirectory();
- writer = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
+ writer = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMergeScheduler(new SerialMergeScheduler()));
+ lmp = new LogByteSizeMergePolicy(writer);
lmp.setMinMergeMB(0.0001);
+ lmp.setUseCompoundFile(false);
+ lmp.setUseCompoundDocStore(false);
+ lmp.setMergeFactor(4);
writer.setMergePolicy(lmp);
- writer.setMergeFactor(4);
- writer.setUseCompoundFile(false);
- writer.setMergeScheduler(new SerialMergeScheduler());
writer.addIndexesNoOptimize(new Directory[] {dir});
writer.close();
dir.close();
@@ -544,14 +551,16 @@
// is respected when copying tail segments
public void testTargetCFS() throws IOException {
Directory dir = new RAMDirectory();
- IndexWriter writer = newWriter(dir, true);
- writer.setUseCompoundFile(false);
+ IndexWriter writer = newWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
addDocs(writer, 1);
writer.close();
Directory other = new RAMDirectory();
- writer = newWriter(other, true);
- writer.setUseCompoundFile(true);
+ writer = newWriter(other, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(true);
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(true);
writer.addIndexesNoOptimize(new Directory[] {dir});
assertTrue(writer.newestSegment().getUseCompoundFile());
writer.close();
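
Every file below repeats the rewrite shown in the hunks above. As a minimal sketch of the pattern (identifiers as in the test code above; the OpenMode and merge-factor values are illustrative, and the cast assumes the writer's default policy is a LogMergePolicy, as these tests do):

    // Deprecated form, removed by this patch:
    //   IndexWriter w = new IndexWriter(dir, analyzer, false, IndexWriter.MaxFieldLength.UNLIMITED);
    //   w.setMaxBufferedDocs(10);
    //   w.setMergeFactor(4);
    // Replacement: single ctor taking a config; merge knobs move to the policy.
    IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
        .setOpenMode(OpenMode.APPEND)   // replaces the boolean create=false
        .setMaxBufferedDocs(10));
    ((LogMergePolicy) w.getMergePolicy()).setMergeFactor(4);
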
Index: src/test/org/apache/lucene/index/TestAtomicUpdate.java
===================================================================
--- src/test/org/apache/lucene/index/TestAtomicUpdate.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestAtomicUpdate.java (working copy)
@@ -18,21 +18,21 @@
import org.apache.lucene.util.*;
import org.apache.lucene.store.*;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.*;
-import org.apache.lucene.analysis.*;
import java.util.Random;
import java.io.File;
import java.io.IOException;
public class TestAtomicUpdate extends LuceneTestCase {
- private static final Analyzer ANALYZER = new SimpleAnalyzer(TEST_VERSION_CURRENT);
- private Random RANDOM;
+
+ private static final class MockIndexWriter extends IndexWriter {
- public class MockIndexWriter extends IndexWriter {
+ static Random RANDOM;
- public MockIndexWriter(Directory dir, Analyzer a, boolean create, IndexWriter.MaxFieldLength mfl) throws IOException {
- super(dir, a, create, mfl);
+ public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
+ super(dir, conf);
}
@Override
@@ -126,9 +126,10 @@
TimedThread[] threads = new TimedThread[4];
- IndexWriter writer = new MockIndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setMaxBufferedDocs(7);
- writer.setMergeFactor(3);
+ IndexWriter writer = new MockIndexWriter(directory, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setMaxBufferedDocs(7));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(3);
// Establish a base index of 100 docs:
for(int i=0;i<100;i++) {
@@ -183,7 +184,7 @@
FSDirectory.
*/
public void testAtomicUpdates() throws Exception {
- RANDOM = newRandom();
+ MockIndexWriter.RANDOM = newRandom();
Directory directory;
// First in a RAM directory:
Index: src/test/org/apache/lucene/index/TestBackwardsCompatibility.java
===================================================================
--- src/test/org/apache/lucene/index/TestBackwardsCompatibility.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestBackwardsCompatibility.java (working copy)
@@ -38,6 +38,7 @@
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
@@ -52,8 +53,7 @@
against it, and add documents to it.
*/
-public class TestBackwardsCompatibility extends LuceneTestCase
-{
+public class TestBackwardsCompatibility extends LuceneTestCase {
// Uncomment these cases & run them on an older Lucene
// version, to generate an index to test backwards
@@ -215,7 +215,8 @@
hasTested29++;
}
- IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
w.optimize();
w.close();
@@ -355,7 +356,7 @@
Directory dir = FSDirectory.open(new File(dirName));
// open writer
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
// add 10 docs
for(int i=0;i<10;i++) {
@@ -399,7 +400,7 @@
searcher.close();
// optimize
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@@ -449,7 +450,7 @@
searcher.close();
// optimize
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@@ -471,9 +472,9 @@
dirName = fullDir(dirName);
Directory dir = FSDirectory.open(new File(dirName));
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setUseCompoundFile(doCFS);
- writer.setMaxBufferedDocs(10);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(doCFS);
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(doCFS);
for(int i=0;i<35;i++) {
addDoc(writer, i);
@@ -482,9 +483,9 @@
writer.close();
// open fresh writer so we get no prx file in the added segment
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
- writer.setUseCompoundFile(doCFS);
- writer.setMaxBufferedDocs(10);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(doCFS);
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(doCFS);
addNoProxDoc(writer);
writer.close();
@@ -509,8 +510,7 @@
try {
Directory dir = FSDirectory.open(new File(fullDir(outputDir)));
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setRAMBufferSizeMB(16.0);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
for(int i=0;i<35;i++) {
addDoc(writer, i);
}
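
The compound-file settings move the same way: the old writer.setUseCompoundFile() becomes a pair of flags on the merge policy, assuming (as these hunks do) a LogMergePolicy. A minimal sketch, with doCFS standing in for whatever boolean the test threads through:

    LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
    lmp.setUseCompoundFile(doCFS);     // was: writer.setUseCompoundFile(doCFS)
    lmp.setUseCompoundDocStore(doCFS); // doc-store flag, set alongside it throughout this patch
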
Index: src/test/org/apache/lucene/index/TestCheckIndex.java
===================================================================
--- src/test/org/apache/lucene/index/TestCheckIndex.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestCheckIndex.java (working copy)
@@ -34,9 +34,7 @@
public void testDeletedDocs() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
- IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(2);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2));
Document doc = new Document();
doc.add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
for(int i=0;i<19;i++) {
Index: src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java
===================================================================
--- src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestConcurrentMergeScheduler.java (working copy)
@@ -17,20 +17,18 @@
* limitations under the License.
*/
-import org.apache.lucene.analysis.SimpleAnalyzer;
-import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.util.LuceneTestCase;
import java.io.IOException;
public class TestConcurrentMergeScheduler extends LuceneTestCase {
- private static final Analyzer ANALYZER = new SimpleAnalyzer(TEST_VERSION_CURRENT);
-
private static class FailOnlyOnFlush extends MockRAMDirectory.Failure {
boolean doFail;
boolean hitExc;
@@ -68,10 +66,7 @@
FailOnlyOnFlush failure = new FailOnlyOnFlush();
directory.failOn(failure);
- IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
- ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
- writer.setMergeScheduler(cms);
- writer.setMaxBufferedDocs(2);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2));
Document doc = new Document();
Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
doc.add(idField);
@@ -115,9 +110,7 @@
RAMDirectory directory = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
- ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
- writer.setMergeScheduler(cms);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
LogDocMergePolicy mp = new LogDocMergePolicy(writer);
writer.setMergePolicy(mp);
@@ -157,12 +150,11 @@
RAMDirectory directory = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setMaxBufferedDocs(2));
for(int iter=0;iter<7;iter++) {
- ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
- writer.setMergeScheduler(cms);
- writer.setMaxBufferedDocs(2);
for(int j=0;j<21;j++) {
Document doc = new Document();
@@ -174,7 +166,9 @@
TestIndexWriter.assertNoUnreferencedFiles(directory, "testNoExtraFiles");
// Reopen
- writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
+ writer = new IndexWriter(directory, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
}
writer.close();
@@ -189,13 +183,10 @@
Field idField = new Field("id", "", Field.Store.YES, Field.Index.NOT_ANALYZED);
doc.add(idField);
- IndexWriter writer = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
for(int iter=0;iter<10;iter++) {
- ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
- writer.setMergeScheduler(cms);
- writer.setMaxBufferedDocs(2);
- writer.setMergeFactor(100);
for(int j=0;j<201;j++) {
idField.setValue(Integer.toString(iter*201+j));
@@ -210,7 +201,7 @@
// Force a bunch of merge threads to kick off so we
// stress out aborting them on close:
- writer.setMergeFactor(3);
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(3);
writer.addDocument(doc);
writer.commit();
@@ -221,7 +212,8 @@
reader.close();
// Reopen
- writer = new IndexWriter(directory, ANALYZER, false, IndexWriter.MaxFieldLength.UNLIMITED);
+ writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
}
writer.close();
Index: src/test/org/apache/lucene/index/TestCrash.java
===================================================================
--- src/test/org/apache/lucene/index/TestCrash.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestCrash.java (working copy)
@@ -20,9 +20,9 @@
import java.io.IOException;
import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.store.NoLockFactory;
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -35,10 +35,8 @@
private IndexWriter initIndex(MockRAMDirectory dir) throws IOException {
dir.setLockFactory(NoLockFactory.getNoLockFactory());
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
- //writer.setMaxBufferedDocs(2);
- writer.setMaxBufferedDocs(10);
- ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
+ ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
Document doc = new Document();
doc.add(new Field("content", "aaa", Field.Store.YES, Field.Index.ANALYZED));
@@ -51,7 +49,7 @@
private void crash(final IndexWriter writer) throws IOException {
final MockRAMDirectory dir = (MockRAMDirectory) writer.getDirectory();
- ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) writer.getMergeScheduler();
+ ConcurrentMergeScheduler cms = (ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler();
dir.crash();
cms.sync();
dir.clearCrash();
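
Getters follow the same route as setters: anything previously read off the writer is now read off writer.getConfig(), as the TestCrash hunks above show. A minimal sketch of that pattern:

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
        TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
    // was: (ConcurrentMergeScheduler) writer.getMergeScheduler()
    ConcurrentMergeScheduler cms =
        (ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler();
    cms.setSuppressExceptions();
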
Index: src/test/org/apache/lucene/index/TestDeletionPolicy.java
===================================================================
--- src/test/org/apache/lucene/index/TestDeletionPolicy.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestDeletionPolicy.java (working copy)
@@ -26,6 +26,7 @@
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
@@ -40,8 +41,8 @@
against it, and add documents to it.
*/
-public class TestDeletionPolicy extends LuceneTestCase
-{
+public class TestDeletionPolicy extends LuceneTestCase {
+
private void verifyCommitOrder(List<? extends IndexCommit> commits) throws IOException {
final IndexCommit firstCommit = commits.get(0);
long last = SegmentInfos.generationFromSegmentsFileName(firstCommit.getSegmentsFileName());
@@ -201,8 +202,10 @@
Directory dir = new RAMDirectory();
ExpirationTimeDeletionPolicy policy = new ExpirationTimeDeletionPolicy(dir, SECONDS);
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setUseCompoundFile(useCompoundFile);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(policy));
+ LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
+ lmp.setUseCompoundFile(useCompoundFile);
+ lmp.setUseCompoundDocStore(useCompoundFile);
writer.close();
long lastDeleteTime = 0;
@@ -210,8 +213,11 @@
// Record last time when writer performed deletes of
// past commits
lastDeleteTime = System.currentTimeMillis();
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setUseCompoundFile(useCompoundFile);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
+ lmp = (LogMergePolicy) writer.getMergePolicy();
+ lmp.setUseCompoundFile(useCompoundFile);
+ lmp.setUseCompoundDocStore(useCompoundFile);
for(int j=0;j<17;j++) {
addDoc(writer);
}
@@ -271,17 +277,24 @@
Directory dir = new RAMDirectory();
policy.dir = dir;
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setMaxBufferedDocs(10);
- writer.setUseCompoundFile(useCompoundFile);
- writer.setMergeScheduler(new SerialMergeScheduler());
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setIndexDeletionPolicy(policy).setMaxBufferedDocs(10)
+ .setMergeScheduler(new SerialMergeScheduler()));
+ LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
+ lmp.setUseCompoundFile(useCompoundFile);
+ lmp.setUseCompoundDocStore(useCompoundFile);
for(int i=0;i<107;i++) {
addDoc(writer);
}
writer.close();
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setUseCompoundFile(useCompoundFile);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT,
+ new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(
+ OpenMode.APPEND).setIndexDeletionPolicy(policy));
+ lmp = (LogMergePolicy) writer.getMergePolicy();
+ lmp.setUseCompoundFile(useCompoundFile);
+ lmp.setUseCompoundDocStore(useCompoundFile);
writer.optimize();
writer.close();
@@ -318,7 +331,10 @@
// Open & close a writer and assert that it
// actually removed something:
int preCount = dir.listAll().length;
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT,
+ new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(
+ OpenMode.APPEND).setIndexDeletionPolicy(policy));
writer.close();
int postCount = dir.listAll().length;
assertTrue(postCount < preCount);
@@ -340,8 +356,9 @@
Directory dir = new MockRAMDirectory();
policy.dir = dir;
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(2);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setIndexDeletionPolicy(policy).setMaxBufferedDocs(2));
for(int i=0;i<10;i++) {
addDoc(writer);
if ((1+i)%2 == 0)
@@ -359,7 +376,7 @@
assertTrue(lastCommit != null);
// Now add 1 doc and optimize
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(policy));
addDoc(writer);
assertEquals(11, writer.numDocs());
writer.optimize();
@@ -368,7 +385,8 @@
assertEquals(7, IndexReader.listCommits(dir).size());
// Now open writer on the commit just before optimize:
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Should undo our rollback:
@@ -380,7 +398,8 @@
assertEquals(11, r.numDocs());
r.close();
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED, lastCommit);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setIndexDeletionPolicy(policy).setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Commits the rollback:
writer.close();
@@ -396,7 +415,7 @@
r.close();
// Reoptimize
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), policy, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(policy));
writer.optimize();
writer.close();
@@ -407,7 +426,7 @@
// Now open writer on the commit just before optimize,
// but this time keeping only the last commit:
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), new KeepOnlyLastCommitDeletionPolicy(), IndexWriter.MaxFieldLength.LIMITED, lastCommit);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setIndexCommit(lastCommit));
assertEquals(10, writer.numDocs());
// Reader still sees optimized index, because writer
@@ -443,16 +462,23 @@
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, policy, IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setMaxBufferedDocs(10);
- writer.setUseCompoundFile(useCompoundFile);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.CREATE).setIndexDeletionPolicy(policy)
+ .setMaxBufferedDocs(10));
+ LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
+ lmp.setUseCompoundFile(useCompoundFile);
+ lmp.setUseCompoundDocStore(useCompoundFile);
for(int i=0;i<107;i++) {
addDoc(writer);
}
writer.close();
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, policy, IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setUseCompoundFile(useCompoundFile);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND).setIndexDeletionPolicy(policy));
+ lmp = (LogMergePolicy) writer.getMergePolicy();
+ lmp.setUseCompoundFile(useCompoundFile);
+ lmp.setUseCompoundDocStore(useCompoundFile);
writer.optimize();
writer.close();
@@ -486,9 +512,13 @@
KeepLastNDeletionPolicy policy = new KeepLastNDeletionPolicy(N);
for(int j=0;j<N+1;j++) {
Index: src/test/org/apache/lucene/index/TestIndexWriter.java
===================================================================
--- src/test/org/apache/lucene/index/TestIndexWriter.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestIndexWriter.java (working copy)
assertTrue(flushCount > lastFlushCount);
lastFlushCount = flushCount;
writer.setRAMBufferSizeMB(0.000001);
- writer.setMaxBufferedDocs(IndexWriter.DISABLE_AUTO_FLUSH);
+ writer.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
} else if (j < 20) {
assertTrue(flushCount > lastFlushCount);
lastFlushCount = flushCount;
} else if (20 == j) {
writer.setRAMBufferSizeMB(16);
- writer.setMaxBufferedDocs(IndexWriter.DISABLE_AUTO_FLUSH);
+ writer.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
lastFlushCount = flushCount;
} else if (j < 30) {
assertEquals(flushCount, lastFlushCount);
} else if (30 == j) {
writer.setRAMBufferSizeMB(0.000001);
- writer.setMaxBufferedDocs(IndexWriter.DISABLE_AUTO_FLUSH);
+ writer.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
} else if (j < 40) {
assertTrue(flushCount> lastFlushCount);
lastFlushCount = flushCount;
} else if (40 == j) {
writer.setMaxBufferedDocs(10);
- writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
+ writer.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
lastFlushCount = flushCount;
} else if (j < 50) {
assertEquals(flushCount, lastFlushCount);
writer.setMaxBufferedDocs(10);
- writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
+ writer.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
} else if (50 == j) {
assertTrue(flushCount > lastFlushCount);
}
@@ -1259,12 +1264,15 @@
dir.close();
}
+ /**
+ * @deprecated after setters on IW go away, this test can be deleted because
+ * changing those settings on IW won't be possible.
+ */
public void testChangingRAMBuffer2() throws IOException {
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(10);
- writer.setMaxBufferedDeleteTerms(10);
- writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10).setMaxBufferedDeleteTerms(
+ 10).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH));
for(int j=1;j<52;j++) {
Document doc = new Document();
@@ -1292,25 +1300,25 @@
lastFlushCount = flushCount;
} else if (20 == j) {
writer.setRAMBufferSizeMB(16);
- writer.setMaxBufferedDeleteTerms(IndexWriter.DISABLE_AUTO_FLUSH);
+ writer.setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH);
lastFlushCount = flushCount;
} else if (j < 30) {
assertEquals(flushCount, lastFlushCount);
} else if (30 == j) {
writer.setRAMBufferSizeMB(0.000001);
- writer.setMaxBufferedDeleteTerms(IndexWriter.DISABLE_AUTO_FLUSH);
+ writer.setMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH);
writer.setMaxBufferedDeleteTerms(1);
} else if (j < 40) {
assertTrue(flushCount> lastFlushCount);
lastFlushCount = flushCount;
} else if (40 == j) {
writer.setMaxBufferedDeleteTerms(10);
- writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
+ writer.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
lastFlushCount = flushCount;
} else if (j < 50) {
assertEquals(flushCount, lastFlushCount);
writer.setMaxBufferedDeleteTerms(10);
- writer.setRAMBufferSizeMB(IndexWriter.DISABLE_AUTO_FLUSH);
+ writer.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
} else if (50 == j) {
assertTrue(flushCount > lastFlushCount);
}
@@ -1321,8 +1329,7 @@
public void testDiverseDocs() throws IOException {
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setRAMBufferSizeMB(0.5);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setRAMBufferSizeMB(0.5));
Random rand = newRandom();
for(int i=0;i<3;i++) {
// First, docs where every term is unique (heavy on
@@ -1370,8 +1377,7 @@
public void testEnablingNorms() throws IOException {
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(10);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
// Enable norms for only 1 doc, pre flush
for(int j=0;j<10;j++) {
Document doc = new Document();
@@ -1391,8 +1397,8 @@
assertEquals(10, hits.length);
searcher.close();
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(10);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(10));
// Enable norms for only 1 doc, post flush
for(int j=0;j<27;j++) {
Document doc = new Document();
@@ -1417,8 +1423,8 @@
public void testHighFreqTerm() throws IOException {
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, new IndexWriter.MaxFieldLength(100000000));
- writer.setRAMBufferSizeMB(0.01);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxFieldLength(100000000).setRAMBufferSizeMB(0.01));
// Massive doc that has 128 K a's
StringBuilder b = new StringBuilder(1024*1024);
for(int i=0;i<4096;i++) {
@@ -1464,7 +1470,8 @@
}
Directory dir = new MyRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
for (int i = 0; i < 100; i++) {
addDoc(writer);
}
@@ -1475,7 +1482,8 @@
assertEquals("did not get right number of hits", 100, hits.length);
writer.close();
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.CREATE));
writer.close();
dir.close();
@@ -1483,8 +1491,8 @@
public void testFlushWithNoMerging() throws IOException {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(2);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2));
Document doc = new Document();
doc.add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
for(int i=0;i<19;i++)
@@ -1502,7 +1510,7 @@
// empty doc (no norms) and flush
public void testEmptyDocAfterFlushingRealDoc() throws IOException {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
writer.addDocument(doc);
@@ -1521,12 +1529,12 @@
Directory dir = new MockRAMDirectory();
for(int pass=0;pass<2;pass++) {
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMergeScheduler(new ConcurrentMergeScheduler());
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE)
+ .setMaxBufferedDocs(2));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(101);
Document doc = new Document();
doc.add(new Field("field", "aaa", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
- writer.setMaxBufferedDocs(2);
- writer.setMergeFactor(101);
for(int i=0;i<200;i++)
writer.addDocument(doc);
writer.optimize(false);
@@ -1575,20 +1583,21 @@
*/
public void testBadSegment() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter ir = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
Document document = new Document();
- document.add(new Field("tvtest", "", Field.Store.NO, Field.Index.ANALYZED,
- Field.TermVector.YES));
- ir.addDocument(document);
- ir.close();
+ document.add(new Field("tvtest", "", Store.NO, Index.ANALYZED, TermVector.YES));
+ iw.addDocument(document);
+ iw.close();
dir.close();
}
// LUCENE-1008
public void testNoTermVectorAfterTermVector() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
Document document = new Document();
document.add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED,
Field.TermVector.YES));
@@ -1614,7 +1623,8 @@
// LUCENE-1010
public void testNoTermVectorAfterTermVectorMerge() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
Document document = new Document();
document.add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED,
Field.TermVector.YES));
@@ -1646,12 +1656,13 @@
int pri = Thread.currentThread().getPriority();
try {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT))
+ .setMaxBufferedDocs(2));
+ ((LogMergePolicy) iw.getMergePolicy()).setMergeFactor(2);
Document document = new Document();
document.add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED,
Field.TermVector.YES));
- iw.setMaxBufferedDocs(2);
- iw.setMergeFactor(2);
Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
for(int i=0;i<4;i++)
iw.addDocument(document);
@@ -1686,11 +1697,12 @@
// LUCENE-1013
public void testSetMaxMergeDocs() throws IOException {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter iw = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- iw.setMergeScheduler(new MyMergeScheduler());
- iw.setMaxMergeDocs(20);
- iw.setMaxBufferedDocs(2);
- iw.setMergeFactor(2);
+ IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT))
+ .setMergeScheduler(new MyMergeScheduler()).setMaxBufferedDocs(2));
+ LogMergePolicy lmp = (LogMergePolicy) iw.getMergePolicy();
+ lmp.setMaxMergeDocs(20);
+ lmp.setMergeFactor(2);
Document document = new Document();
document.add(new Field("tvtest", "a b c", Field.Store.NO, Field.Index.ANALYZED,
Field.TermVector.YES));
@@ -1702,7 +1714,7 @@
// LUCENE-1072
public void testExceptionFromTokenStream() throws IOException {
RAMDirectory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new Analyzer() {
+ IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new Analyzer() {
@Override
public TokenStream tokenStream(String fieldName, Reader reader) {
@@ -1719,7 +1731,8 @@
};
}
- }, true, IndexWriter.MaxFieldLength.LIMITED);
+ });
+ IndexWriter writer = new IndexWriter(dir, conf);
Document doc = new Document();
String contents = "aa bb cc dd ee ff gg hh ii jj kk";
@@ -1804,8 +1817,7 @@
failure.setDoFail();
dir.failOn(failure);
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(2);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2));
Document doc = new Document();
String contents = "aa bb cc dd ee ff gg hh ii jj kk";
doc.add(new Field("content", contents, Field.Store.NO,
@@ -1860,7 +1872,7 @@
for(int i=0;i<2;i++) {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
//writer.setInfoStream(System.out);
Document doc = new Document();
doc.add(new Field("contents", "here are some contents", Field.Store.YES,
@@ -1903,8 +1915,8 @@
assertEquals(1, numDel);
- writer = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(10);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT,
+ analyzer).setMaxBufferedDocs(10));
doc = new Document();
doc.add(new Field("contents", "here are some contents", Field.Store.YES,
Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -1948,7 +1960,7 @@
MockRAMDirectory dir = new MockRAMDirectory();
{
- final IndexWriter writer = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
+ final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
final int finalI = i;
@@ -2017,8 +2029,8 @@
assertEquals(NUM_THREAD*NUM_ITER, numDel);
- IndexWriter writer = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(10);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(10));
Document doc = new Document();
doc.add(new Field("contents", "here are some contents", Field.Store.YES,
Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -2051,10 +2063,11 @@
MockRAMDirectory dir = new MockRAMDirectory();
int delID = 0;
for(int i=0;i<20;i++) {
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(2);
- writer.setMergeFactor(2);
- writer.setUseCompoundFile(false);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2));
+ LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
+ lmp.setMergeFactor(2);
+ lmp.setUseCompoundFile(false);
+ lmp.setUseCompoundDocStore(false);
Document doc = new Document();
String contents = "aa bb cc dd ee ff gg hh ii jj kk";
@@ -2087,8 +2100,10 @@
reader.close();
if (0 == i % 4) {
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
- writer.setUseCompoundFile(false);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
+ LogMergePolicy lmp2 = (LogMergePolicy) writer.getMergePolicy();
+ lmp2.setUseCompoundFile(false);
+ lmp2.setUseCompoundDocStore(false);
writer.optimize();
writer.close();
}
@@ -2104,21 +2119,18 @@
for(int pass=0;pass<2;pass++) {
- IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
-
+ IndexWriterConfig conf = new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE)
+ .setMaxBufferedDocs(2);
+ if (pass == 2) {
+ conf.setMergeScheduler(new SerialMergeScheduler());
+ }
+ IndexWriter writer = new IndexWriter(directory, conf);
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
+
//System.out.println("TEST: pass=" + pass + " cms=" + (pass >= 2));
for(int iter=0;iter<10;iter++) {
//System.out.println("TEST: iter=" + iter);
- MergeScheduler ms;
- if (pass == 1)
- ms = new ConcurrentMergeScheduler();
- else
- ms = new SerialMergeScheduler();
-
- writer.setMergeScheduler(ms);
- writer.setMaxBufferedDocs(2);
- writer.setMergeFactor(100);
-
for(int j=0;j<199;j++) {
idField.setValue(Integer.toString(iter*201+j));
writer.addDocument(doc);
@@ -2132,7 +2144,7 @@
// Force a bunch of merge threads to kick off so we
// stress out aborting them on close:
- writer.setMergeFactor(2);
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
final IndexWriter finalWriter = writer;
final ArrayList<Throwable> failure = new ArrayList<Throwable>();
@@ -2176,7 +2188,7 @@
reader.close();
// Reopen
- writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
+ writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
}
writer.close();
}
@@ -2256,16 +2268,12 @@
for(int iter=0;iter<7;iter++) {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
- ConcurrentMergeScheduler cms = new ConcurrentMergeScheduler();
-
+ IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10);
// We expect AlreadyClosedException
- cms.setSuppressExceptions();
+ ((ConcurrentMergeScheduler) conf.getMergeScheduler()).setSuppressExceptions();
+ IndexWriter writer = new IndexWriter(dir, conf);
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(4);
- writer.setMergeScheduler(cms);
- writer.setMaxBufferedDocs(10);
- writer.setMergeFactor(4);
-
IndexerThread[] threads = new IndexerThread[NUM_THREADS];
for(int i=0;i<NUM_THREADS;i++)
Map<String,String> data = new HashMap<String,String>();
@@ -4021,7 +4007,7 @@
assertEquals("test1", r.getCommitUserData().get("label"));
r.close();
- w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
w.optimize();
w.close();
@@ -4032,17 +4018,17 @@
public void testOptimizeExceptions() throws IOException {
RAMDirectory startDir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(startDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
- w.setMaxBufferedDocs(2);
- w.setMergeFactor(100);
+ IndexWriter w = new IndexWriter(startDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2));
+ ((LogMergePolicy) w.getMergePolicy()).setMergeFactor(100);
for(int i=0;i<27;i++)
addDoc(w);
w.close();
for(int i=0;i<200;i++) {
MockRAMDirectory dir = new MockRAMDirectory(startDir);
- w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
- ((ConcurrentMergeScheduler) w.getMergeScheduler()).setSuppressExceptions();
+ IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
+ ((ConcurrentMergeScheduler) conf.getMergeScheduler()).setSuppressExceptions();
+ w = new IndexWriter(dir, conf);
dir.setRandomIOExceptionRate(0.5, 100);
try {
w.optimize();
@@ -4060,7 +4046,9 @@
final List<Throwable> thrown = new ArrayList<Throwable>();
- final IndexWriter writer = new IndexWriter(new MockRAMDirectory(), new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED) {
+ final IndexWriter writer = new IndexWriter(new MockRAMDirectory(),
+ new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(
+ TEST_VERSION_CURRENT))) {
@Override
public void message(final String message) {
if (message.startsWith("now flush at close") && 0 == thrown.size()) {
@@ -4085,7 +4073,8 @@
// LUCENE-1442
public void testDoubleOffsetCounting() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
Field f = new Field("field", "abcd", Field.Store.NO, Field.Index.NOT_ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
doc.add(f);
@@ -4120,7 +4109,7 @@
// LUCENE-1442
public void testDoubleOffsetCounting2() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
Field f = new Field("field", "abcd", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
doc.add(f);
@@ -4142,7 +4131,7 @@
// LUCENE-1448
public void testEndOffsetPositionCharAnalyzer() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
Field f = new Field("field", "abcd ", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
doc.add(f);
@@ -4165,7 +4154,7 @@
public void testEndOffsetPositionWithCachingTokenFilter() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
Analyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
- IndexWriter w = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
Document doc = new Document();
TokenStream stream = new CachingTokenFilter(analyzer.tokenStream("field", new StringReader("abcd ")));
Field f = new Field("field", stream, Field.TermVector.WITH_POSITIONS_OFFSETS);
@@ -4189,7 +4178,7 @@
public void testEndOffsetPositionWithTeeSinkTokenFilter() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
Analyzer analyzer = new WhitespaceAnalyzer(TEST_VERSION_CURRENT);
- IndexWriter w = new IndexWriter(dir, analyzer, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
Document doc = new Document();
TeeSinkTokenFilter tee = new TeeSinkTokenFilter(analyzer.tokenStream("field", new StringReader("abcd ")));
TokenStream sink = tee.newSinkTokenStream();
@@ -4214,7 +4203,8 @@
// LUCENE-1448
public void testEndOffsetPositionStopFilter() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new StopAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StopAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
Field f = new Field("field", "abcd the", Field.Store.NO, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
doc.add(f);
@@ -4236,7 +4226,9 @@
// LUCENE-1448
public void testEndOffsetPositionStandard() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(
+ TEST_VERSION_CURRENT)));
Document doc = new Document();
Field f = new Field("field", "abcd the ", Field.Store.NO,
Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
@@ -4266,7 +4258,9 @@
// LUCENE-1448
public void testEndOffsetPositionStandardEmptyField() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(
+ TEST_VERSION_CURRENT)));
Document doc = new Document();
Field f = new Field("field", "", Field.Store.NO,
Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS);
@@ -4293,7 +4287,9 @@
// LUCENE-1448
public void testEndOffsetPositionStandardEmptyField2() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new StandardAnalyzer(
+ TEST_VERSION_CURRENT)));
Document doc = new Document();
Field f = new Field("field", "abcd", Field.Store.NO,
@@ -4335,7 +4331,7 @@
out.writeByte((byte) 42);
out.close();
- new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED).close();
+ new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))).close();
assertTrue(dir.fileExists("myrandomfile"));
@@ -4351,8 +4347,7 @@
public void testDeadlock() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setMaxBufferedDocs(2);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2));
Document doc = new Document();
doc.add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES,
Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
@@ -4363,7 +4358,7 @@
// index has 2 segments
MockRAMDirectory dir2 = new MockRAMDirectory();
- IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer2.addDocument(doc);
writer2.close();
@@ -4401,7 +4396,8 @@
if (w != null) {
w.close();
}
- w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+ w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2));
+ ((LogMergePolicy) w.getMergePolicy()).setMergeFactor(2);
//((ConcurrentMergeScheduler) w.getMergeScheduler()).setSuppressExceptions();
if (!first && !allowInterrupt) {
@@ -4410,8 +4406,6 @@
allowInterrupt = true;
}
- w.setMaxBufferedDocs(2);
- w.setMergeFactor(2);
Document doc = new Document();
doc.add(new Field("field", "some text contents", Field.Store.YES, Field.Index.ANALYZED));
for(int i=0;i<100;i++) {
@@ -4510,7 +4504,7 @@
public void testIndexStoreCombos() throws Exception {
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
byte[] b = new byte[50];
for(int i=0;i<50;i++)
b[i] = (byte) (i+77);
@@ -4572,7 +4566,7 @@
// LUCENE-1727: make sure doc fields are stored in order
public void testStoredFieldsOrder() throws Throwable {
Directory d = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(d, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("zzz", "a b c", Field.Store.YES, Field.Index.NO));
doc.add(new Field("aaa", "a b c", Field.Store.YES, Field.Index.NO));
@@ -4604,7 +4598,7 @@
public void testEmbeddedFFFF() throws Throwable {
Directory d = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(d, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter w = new IndexWriter(d, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("field", "a a\uffffb", Field.Store.NO, Field.Index.ANALYZED));
w.addDocument(doc);
@@ -4619,8 +4613,11 @@
public void testNoDocsIndex() throws Throwable {
Directory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setUseCompoundFile(false);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
+ LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
+ lmp.setUseCompoundFile(false);
+ lmp.setUseCompoundDocStore(false);
ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
writer.setInfoStream(new PrintStream(bos));
writer.addDocument(new Document());
@@ -4637,7 +4634,8 @@
final int NUM_THREADS = 5;
final double RUN_SEC = 0.5;
final Directory dir = new MockRAMDirectory();
- final IndexWriter w = new IndexWriter(dir, new SimpleAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+ final IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
w.commit();
final AtomicBoolean failed = new AtomicBoolean();
Thread[] threads = new Thread[NUM_THREADS];
@@ -4688,7 +4686,7 @@
for(int iter=0;iter<2;iter++) {
Directory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("field", "go", Field.Store.NO, Field.Index.ANALYZED));
w.addDocument(doc);
Index: src/test/org/apache/lucene/index/TestIndexWriterConfig.java
===================================================================
--- src/test/org/apache/lucene/index/TestIndexWriterConfig.java (revision 0)
+++ src/test/org/apache/lucene/index/TestIndexWriterConfig.java (revision 0)
@@ -0,0 +1,271 @@
+package org.apache.lucene.index;
+
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.util.HashSet;
+import java.util.Set;
+
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
+import org.apache.lucene.index.DocumentsWriter.IndexingChain;
+import org.apache.lucene.index.IndexWriter.IndexReaderWarmer;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.search.DefaultSimilarity;
+import org.apache.lucene.search.Similarity;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.LuceneTestCaseJ4;
+import org.junit.Test;
+
+public class TestIndexWriterConfig extends LuceneTestCaseJ4 {
+
+ private static final class MySimilarity extends DefaultSimilarity {
+ // Does not implement anything - used only for type checking on IndexWriterConfig.
+ }
+
+ private static final class MyIndexingChain extends IndexingChain {
+ // Does not implement anything - used only for type checking on IndexWriterConfig.
+
+ @Override
+ DocConsumer getChain(DocumentsWriter documentsWriter) {
+ return null;
+ }
+
+ }
+
+ private static final class MyWarmer extends IndexReaderWarmer {
+ // Does not implement anything - used only for type checking on IndexWriterConfig.
+
+ @Override
+ public void warm(IndexReader reader) throws IOException {
+ }
+
+ }
+
+ @Test
+ public void testDefaults() throws Exception {
+ IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
+ assertEquals(WhitespaceAnalyzer.class, conf.getAnalyzer().getClass());
+ assertNull(conf.getIndexCommit());
+ assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
+ assertEquals(IndexWriterConfig.UNLIMITED_FIELD_LENGTH, conf.getMaxFieldLength());
+ assertEquals(ConcurrentMergeScheduler.class, conf.getMergeScheduler().getClass());
+ assertEquals(OpenMode.CREATE_OR_APPEND, conf.getOpenMode());
+ assertTrue(Similarity.getDefault() == conf.getSimilarity());
+ assertEquals(IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL, conf.getTermIndexInterval());
+ assertEquals(IndexWriterConfig.getDefaultWriteLockTimeout(), conf.getWriteLockTimeout());
+ assertEquals(IndexWriterConfig.WRITE_LOCK_TIMEOUT, IndexWriterConfig.getDefaultWriteLockTimeout());
+ assertEquals(IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS, conf.getMaxBufferedDeleteTerms());
+ assertEquals(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB, conf.getRAMBufferSizeMB(), 0.0);
+ assertEquals(IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS, conf.getMaxBufferedDocs());
+ assertTrue(DocumentsWriter.defaultIndexingChain == conf.getIndexingChain());
+ assertNull(conf.getMergedSegmentWarmer());
+
+ // Sanity check - validate that all getters are covered.
+ Set<String> getters = new HashSet<String>();
+ getters.add("getAnalyzer");
+ getters.add("getIndexCommit");
+ getters.add("getIndexDeletionPolicy");
+ getters.add("getMaxFieldLength");
+ getters.add("getMergeScheduler");
+ getters.add("getOpenMode");
+ getters.add("getSimilarity");
+ getters.add("getTermIndexInterval");
+ getters.add("getWriteLockTimeout");
+ getters.add("getDefaultWriteLockTimeout");
+ getters.add("getMaxBufferedDeleteTerms");
+ getters.add("getRAMBufferSizeMB");
+ getters.add("getMaxBufferedDocs");
+ getters.add("getIndexingChain");
+ getters.add("getMergedSegmentWarmer");
+ for (Method m : IndexWriterConfig.class.getDeclaredMethods()) {
+ if (m.getDeclaringClass() == IndexWriterConfig.class && m.getName().startsWith("get")) {
+ assertTrue("method " + m.getName() + " is not tested for defaults", getters.contains(m.getName()));
+ }
+ }
+ }
+
+ @Test
+ public void testSettersChaining() throws Exception {
+ // Ensures that every setter returns IndexWriterConfig to enable easy
+ // chaining.
+ for (Method m : IndexWriterConfig.class.getDeclaredMethods()) {
+ if (m.getDeclaringClass() == IndexWriterConfig.class
+ && m.getName().startsWith("set")
+ && !Modifier.isStatic(m.getModifiers())) {
+ assertEquals("method " + m.getName() + " does not return IndexWriterConfig",
+ IndexWriterConfig.class, m.getReturnType());
+ }
+ }
+ }
+
+ @Test
+ public void testConstants() throws Exception {
+ // Tests that the values of the constants do not change
+ assertEquals(1000, IndexWriterConfig.WRITE_LOCK_TIMEOUT);
+ assertEquals(128, IndexWriterConfig.DEFAULT_TERM_INDEX_INTERVAL);
+ assertEquals(Integer.MAX_VALUE, IndexWriterConfig.UNLIMITED_FIELD_LENGTH);
+ assertEquals(-1, IndexWriterConfig.DISABLE_AUTO_FLUSH);
+ assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, IndexWriterConfig.DEFAULT_MAX_BUFFERED_DELETE_TERMS);
+ assertEquals(IndexWriterConfig.DISABLE_AUTO_FLUSH, IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS);
+ assertEquals(16.0, IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB, 0.0);
+ }
+
+ @Test
+ public void testToString() throws Exception {
+ String str = new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).toString();
+ for (Field f : IndexWriterConfig.class.getDeclaredFields()) {
+ int modifiers = f.getModifiers();
+ if (Modifier.isStatic(modifiers) && Modifier.isFinal(modifiers)) {
+ // Skip static final fields; they are only constants
+ continue;
+ } else if ("indexingChain".equals(f.getName())) {
+ // indexingChain is a package-private setting and thus is not output by
+ // toString.
+ continue;
+ }
+ assertTrue(f.getName() + " not found in toString", str.indexOf(f.getName()) != -1);
+ }
+ }
+
+ @Test
+ public void testClone() throws Exception {
+ IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
+ IndexWriterConfig clone = (IndexWriterConfig) conf.clone();
+
+ // Clone is shallow since not all parameters are cloneable.
+ assertTrue(conf.getIndexDeletionPolicy() == clone.getIndexDeletionPolicy());
+
+ conf.setMergeScheduler(new SerialMergeScheduler());
+ assertEquals(ConcurrentMergeScheduler.class, clone.getMergeScheduler().getClass());
+ }
+
+ @Test
+ public void testInvalidValues() throws Exception {
+ IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
+
+ // Test IndexDeletionPolicy
+ assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
+ conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(null));
+ assertEquals(SnapshotDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
+ conf.setIndexDeletionPolicy(null);
+ assertEquals(KeepOnlyLastCommitDeletionPolicy.class, conf.getIndexDeletionPolicy().getClass());
+
+ // Test MergeScheduler
+ assertEquals(ConcurrentMergeScheduler.class, conf.getMergeScheduler().getClass());
+ conf.setMergeScheduler(new SerialMergeScheduler());
+ assertEquals(SerialMergeScheduler.class, conf.getMergeScheduler().getClass());
+ conf.setMergeScheduler(null);
+ assertEquals(ConcurrentMergeScheduler.class, conf.getMergeScheduler().getClass());
+
+ // Test Similarity
+ assertTrue(Similarity.getDefault() == conf.getSimilarity());
+ conf.setSimilarity(new MySimilarity());
+ assertEquals(MySimilarity.class, conf.getSimilarity().getClass());
+ conf.setSimilarity(null);
+ assertTrue(Similarity.getDefault() == conf.getSimilarity());
+
+ // Test IndexingChain
+ assertTrue(DocumentsWriter.defaultIndexingChain == conf.getIndexingChain());
+ conf.setIndexingChain(new MyIndexingChain());
+ assertEquals(MyIndexingChain.class, conf.getIndexingChain().getClass());
+ conf.setIndexingChain(null);
+ assertTrue(DocumentsWriter.defaultIndexingChain == conf.getIndexingChain());
+
+ try {
+ conf.setMaxBufferedDeleteTerms(0);
+ fail("should not have succeeded to set maxBufferedDeleteTerms to 0");
+ } catch (IllegalArgumentException e) {
+ // this is expected
+ }
+
+ try {
+ conf.setMaxBufferedDocs(1);
+ fail("should not have succeeded to set maxBufferedDocs to 1");
+ } catch (IllegalArgumentException e) {
+ // this is expected
+ }
+
+ try {
+ // Disable both MAX_BUF_DOCS and RAM_SIZE_MB
+ conf.setMaxBufferedDocs(4);
+ conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+ conf.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+ fail("should not have succeeded to disable maxBufferedDocs when ramBufferSizeMB is disabled as well");
+ } catch (IllegalArgumentException e) {
+ // this is expected
+ }
+
+ conf.setRAMBufferSizeMB(IndexWriterConfig.DEFAULT_RAM_BUFFER_SIZE_MB);
+ conf.setMaxBufferedDocs(IndexWriterConfig.DEFAULT_MAX_BUFFERED_DOCS);
+ try {
+ conf.setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH);
+ fail("should not have succeeded to disable ramBufferSizeMB when maxBufferedDocs is disabled as well");
+ } catch (IllegalArgumentException e) {
+ // this is expected
+ }
+
+ }
+
+ /**
+ * @deprecated should be removed once all the deprecated setters are removed
+ * from IndexWriter.
+ */
+ @Test
+ public void testIndexWriterSetters() throws Exception {
+ // This test intentionally exercises deprecated methods. It verifies that
+ // whatever the user sets on IndexWriter is passed through to its
+ // IndexWriterConfig, so that iw.getConfig().getXYZ() returns the same
+ // value that was passed to iw.setXYZ().
+ IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT));
+ Directory dir = new RAMDirectory();
+ IndexWriter writer = new IndexWriter(dir, conf);
+
+ writer.setSimilarity(new MySimilarity());
+ assertEquals(MySimilarity.class, writer.getConfig().getSimilarity().getClass());
+
+ writer.setMaxBufferedDeleteTerms(4);
+ assertEquals(4, writer.getConfig().getMaxBufferedDeleteTerms());
+
+ writer.setMaxBufferedDocs(10);
+ assertEquals(10, writer.getConfig().getMaxBufferedDocs());
+
+ writer.setMaxFieldLength(10);
+ assertEquals(10, writer.getConfig().getMaxFieldLength());
+
+ writer.setMergeScheduler(new SerialMergeScheduler());
+ assertEquals(SerialMergeScheduler.class, writer.getConfig().getMergeScheduler().getClass());
+
+ writer.setRAMBufferSizeMB(1.5);
+ assertEquals(1.5, writer.getConfig().getRAMBufferSizeMB(), 0.0);
+
+ writer.setTermIndexInterval(40);
+ assertEquals(40, writer.getConfig().getTermIndexInterval());
+
+ writer.setWriteLockTimeout(100);
+ assertEquals(100, writer.getConfig().getWriteLockTimeout());
+
+ writer.setMergedSegmentWarmer(new MyWarmer());
+ assertEquals(MyWarmer.class, writer.getConfig().getMergedSegmentWarmer().getClass());
+ }
+}
Property changes on: src/test/org/apache/lucene/index/TestIndexWriterConfig.java
___________________________________________________________________
Added: svn:keywords
+ Date Author Id Revision HeadURL
Added: svn:eol-style
+ native
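
A minimal standalone sketch of the fluent configuration style that testSettersChaining above validates. This is not part of the patch; the class name and Version.LUCENE_30 are illustrative stand-ins for whatever Version constant applies:

    import org.apache.lucene.analysis.WhitespaceAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.IndexWriterConfig.OpenMode;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.Version;

    public class ConfigChainingSketch {
      public static void main(String[] args) throws Exception {
        // Every setter returns the config itself, so a writer can be
        // configured in a single chained expression.
        IndexWriterConfig conf = new IndexWriterConfig(Version.LUCENE_30,
            new WhitespaceAnalyzer(Version.LUCENE_30))
            .setOpenMode(OpenMode.CREATE_OR_APPEND)
            .setMaxBufferedDocs(1000)
            .setRAMBufferSizeMB(16.0);
        IndexWriter writer = new IndexWriter(new RAMDirectory(), conf);
        writer.close();
      }
    }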
Index: src/test/org/apache/lucene/index/TestIndexWriterDelete.java
===================================================================
--- src/test/org/apache/lucene/index/TestIndexWriterDelete.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestIndexWriterDelete.java (working copy)
@@ -41,10 +41,8 @@
String[] text = { "Amsterdam", "Venice" };
Directory dir = new MockRAMDirectory();
- IndexWriter modifier = new IndexWriter(dir,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- modifier.setUseCompoundFile(true);
- modifier.setMaxBufferedDeleteTerms(1);
+ IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDeleteTerms(1));
for (int i = 0; i < keywords.length; i++) {
Document doc = new Document();
@@ -78,10 +76,9 @@
public void testNonRAMDelete() throws IOException {
Directory dir = new MockRAMDirectory();
- IndexWriter modifier = new IndexWriter(dir,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- modifier.setMaxBufferedDocs(2);
- modifier.setMaxBufferedDeleteTerms(2);
+ IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2)
+ .setMaxBufferedDeleteTerms(2));
int id = 0;
int value = 100;
@@ -113,9 +110,8 @@
public void testMaxBufferedDeletes() throws IOException {
Directory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setMaxBufferedDeleteTerms(1);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDeleteTerms(1));
writer.deleteDocuments(new Term("foobar", "1"));
writer.deleteDocuments(new Term("foobar", "1"));
writer.deleteDocuments(new Term("foobar", "1"));
@@ -128,10 +124,9 @@
public void testRAMDeletes() throws IOException {
for(int t=0;t<2;t++) {
Directory dir = new MockRAMDirectory();
- IndexWriter modifier = new IndexWriter(dir,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- modifier.setMaxBufferedDocs(4);
- modifier.setMaxBufferedDeleteTerms(4);
+ IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(4)
+ .setMaxBufferedDeleteTerms(4));
int id = 0;
int value = 100;
@@ -170,10 +165,9 @@
// test when delete terms apply to both disk and ram segments
public void testBothDeletes() throws IOException {
Directory dir = new MockRAMDirectory();
- IndexWriter modifier = new IndexWriter(dir,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- modifier.setMaxBufferedDocs(100);
- modifier.setMaxBufferedDeleteTerms(100);
+ IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(100)
+ .setMaxBufferedDeleteTerms(100));
int id = 0;
int value = 100;
@@ -203,10 +197,9 @@
// test that batched delete terms are flushed together
public void testBatchDeletes() throws IOException {
Directory dir = new MockRAMDirectory();
- IndexWriter modifier = new IndexWriter(dir,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- modifier.setMaxBufferedDocs(2);
- modifier.setMaxBufferedDeleteTerms(2);
+ IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2)
+ .setMaxBufferedDeleteTerms(2));
int id = 0;
int value = 100;
@@ -247,10 +240,9 @@
// test deleteAll()
public void testDeleteAll() throws IOException {
Directory dir = new MockRAMDirectory();
- IndexWriter modifier = new IndexWriter(dir,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- modifier.setMaxBufferedDocs(2);
- modifier.setMaxBufferedDeleteTerms(2);
+ IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2)
+ .setMaxBufferedDeleteTerms(2));
int id = 0;
int value = 100;
@@ -294,10 +286,9 @@
// test rollback of deleteAll()
public void testDeleteAllRollback() throws IOException {
Directory dir = new MockRAMDirectory();
- IndexWriter modifier = new IndexWriter(dir,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- modifier.setMaxBufferedDocs(2);
- modifier.setMaxBufferedDeleteTerms(2);
+ IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2)
+ .setMaxBufferedDeleteTerms(2));
int id = 0;
int value = 100;
@@ -332,10 +323,9 @@
// test deleteAll() w/ near real-time reader
public void testDeleteAllNRT() throws IOException {
Directory dir = new MockRAMDirectory();
- IndexWriter modifier = new IndexWriter(dir,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- modifier.setMaxBufferedDocs(2);
- modifier.setMaxBufferedDeleteTerms(2);
+ IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2)
+ .setMaxBufferedDeleteTerms(2));
int id = 0;
int value = 100;
@@ -424,8 +414,7 @@
// First build up a starting index:
MockRAMDirectory startDir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(startDir,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(startDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
for (int i = 0; i < 157; i++) {
Document d = new Document();
d.add(new Field("id", Integer.toString(i), Field.Store.YES,
@@ -447,12 +436,10 @@
while (!done) {
MockRAMDirectory dir = new MockRAMDirectory(startDir);
dir.setPreventDoubleWrite(false);
- IndexWriter modifier = new IndexWriter(dir,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(1000)
+ .setMaxBufferedDeleteTerms(1000));
- modifier.setMaxBufferedDocs(1000); // use flush or close
- modifier.setMaxBufferedDeleteTerms(1000); // use flush or close
-
// For each disk size, first try to commit against
// dir that will hit random IOExceptions & disk
// full; after, give it infinite disk space & turn
@@ -653,10 +640,11 @@
String[] text = { "Amsterdam", "Venice" };
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter modifier = new IndexWriter(dir,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- modifier.setUseCompoundFile(true);
- modifier.setMaxBufferedDeleteTerms(2);
+ IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDeleteTerms(2));
+ LogMergePolicy lmp = (LogMergePolicy) modifier.getMergePolicy();
+ lmp.setUseCompoundFile(true);
+ lmp.setUseCompoundDocStore(true);
dir.failOn(failure.reset());
@@ -762,8 +750,7 @@
String[] text = { "Amsterdam", "Venice" };
MockRAMDirectory dir = new MockRAMDirectory();
- IndexWriter modifier = new IndexWriter(dir,
- new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter modifier = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
dir.failOn(failure.reset());
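
The hunks above repeat one pattern: writer.setUseCompoundFile(...) is gone, and both compound-file flags move onto the merge policy. A hedged sketch of that pattern in isolation (not part of the patch; the cast assumes the writer still uses its default LogMergePolicy, as these tests do):

    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.LogMergePolicy;

    class CompoundFileSketch {
      // Compound-file settings now belong to the MergePolicy, not the writer.
      void setCompound(IndexWriter writer, boolean compound) {
        LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
        lmp.setUseCompoundFile(compound);
        lmp.setUseCompoundDocStore(compound);
      }
    }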
Index: src/test/org/apache/lucene/index/TestIndexWriterExceptions.java
===================================================================
--- src/test/org/apache/lucene/index/TestIndexWriterExceptions.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestIndexWriterExceptions.java (working copy)
@@ -24,7 +24,6 @@
import org.apache.lucene.util._TestUtil;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.store.Directory;
-import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
@@ -111,11 +110,11 @@
ThreadLocal doFail = new ThreadLocal();
- public class MockIndexWriter extends IndexWriter {
+ private class MockIndexWriter extends IndexWriter {
Random r = new java.util.Random(17);
- public MockIndexWriter(Directory dir, Analyzer a, boolean create, MaxFieldLength mfl) throws IOException {
- super(dir, a, create, mfl);
+ public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
+ super(dir, conf);
}
@Override
@@ -134,10 +133,9 @@
public void testRandomExceptions() throws Throwable {
MockRAMDirectory dir = new MockRAMDirectory();
- MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
+ MockIndexWriter writer = new MockIndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setRAMBufferSizeMB(0.1));
+ ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
//writer.setMaxBufferedDocs(10);
- writer.setRAMBufferSizeMB(0.1);
if (DEBUG)
writer.setInfoStream(System.out);
@@ -172,10 +170,9 @@
public void testRandomExceptionsThreads() throws Throwable {
MockRAMDirectory dir = new MockRAMDirectory();
- MockIndexWriter writer = new MockIndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- ((ConcurrentMergeScheduler) writer.getMergeScheduler()).setSuppressExceptions();
+ MockIndexWriter writer = new MockIndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setRAMBufferSizeMB(0.2));
+ ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).setSuppressExceptions();
//writer.setMaxBufferedDocs(10);
- writer.setRAMBufferSizeMB(0.2);
if (DEBUG)
writer.setInfoStream(System.out);
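
As the TestIndexWriterExceptions hunks show, components that used to be read off the writer are now read back through getConfig(). A sketch, not part of the patch; the cast assumes the default ConcurrentMergeScheduler is in use (the default per TestIndexWriterConfig above):

    import org.apache.lucene.index.ConcurrentMergeScheduler;
    import org.apache.lucene.index.IndexWriter;

    class SchedulerSketch {
      // writer.getMergeScheduler() is replaced by
      // writer.getConfig().getMergeScheduler().
      void suppressMergeExceptions(IndexWriter writer) {
        ConcurrentMergeScheduler cms =
            (ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler();
        cms.setSuppressExceptions();
      }
    }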
Index: src/test/org/apache/lucene/index/TestIndexWriterLockRelease.java
===================================================================
--- src/test/org/apache/lucene/index/TestIndexWriterLockRelease.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestIndexWriterLockRelease.java (working copy)
@@ -21,7 +21,9 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.FSDirectory;
/**
@@ -74,10 +76,14 @@
public void testIndexWriterLockRelease() throws IOException {
FSDirectory dir = FSDirectory.open(this.__test_dir);
try {
- new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT,
+ new StandardAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND));
} catch (FileNotFoundException e) {
try {
- new IndexWriter(dir, new org.apache.lucene.analysis.standard.StandardAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT,
+ new StandardAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND));
} catch (FileNotFoundException e1) {
}
} finally {
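
The lock-release hunks above show how the old boolean "create" constructor argument maps onto OpenMode. An illustrative sketch, not part of the patch (class name and Version.LUCENE_30 are stand-ins): create=true corresponds to OpenMode.CREATE, create=false to OpenMode.APPEND, and the new default per TestIndexWriterConfig is CREATE_OR_APPEND.

    import java.io.IOException;
    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.IndexWriterConfig.OpenMode;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.util.Version;

    class OpenModeSketch {
      // Equivalent of the old: new IndexWriter(dir, analyzer, false, mfl)
      IndexWriter openForAppend(Directory dir) throws IOException {
        return new IndexWriter(dir, new IndexWriterConfig(Version.LUCENE_30,
            new StandardAnalyzer(Version.LUCENE_30))
            .setOpenMode(OpenMode.APPEND));
      }
    }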
Index: src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java
===================================================================
--- src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java (working copy)
@@ -22,6 +22,7 @@
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util._TestUtil;
@@ -34,9 +35,8 @@
public void testNormalCase() throws IOException {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(10);
- writer.setMergeFactor(10);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
writer.setMergePolicy(new LogDocMergePolicy(writer));
for (int i = 0; i < 100; i++) {
@@ -51,9 +51,8 @@
public void testNoOverMerge() throws IOException {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(10);
- writer.setMergeFactor(10);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
writer.setMergePolicy(new LogDocMergePolicy(writer));
boolean noOverMerge = false;
@@ -73,9 +72,8 @@
public void testForceFlush() throws IOException {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(10);
- writer.setMergeFactor(10);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
LogDocMergePolicy mp = new LogDocMergePolicy(writer);
mp.setMinMergeDocs(100);
writer.setMergePolicy(mp);
@@ -84,11 +82,11 @@
addDoc(writer);
writer.close();
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(10);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
writer.setMergePolicy(mp);
mp.setMinMergeDocs(100);
- writer.setMergeFactor(10);
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
checkInvariants(writer);
}
@@ -99,9 +97,8 @@
public void testMergeFactorChange() throws IOException {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(10);
- writer.setMergeFactor(100);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
writer.setMergePolicy(new LogDocMergePolicy(writer));
for (int i = 0; i < 250; i++) {
@@ -109,7 +106,7 @@
checkInvariants(writer);
}
- writer.setMergeFactor(5);
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(5);
// merge policy only fixes segments on levels where merges
// have been triggered, so check invariants after all adds
@@ -125,9 +122,8 @@
public void testMaxBufferedDocsChange() throws IOException {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setMaxBufferedDocs(101);
- writer.setMergeFactor(101);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(101));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(101);
writer.setMergePolicy(new LogDocMergePolicy(writer));
// leftmost* segment has 1 doc
@@ -139,14 +135,17 @@
}
writer.close();
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
- writer.setMaxBufferedDocs(101);
- writer.setMergeFactor(101);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(101));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(101);
writer.setMergePolicy(new LogDocMergePolicy(writer));
}
- writer.setMaxBufferedDocs(10);
- writer.setMergeFactor(10);
+ writer.close();
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
+ writer.setMergePolicy(new LogDocMergePolicy(writer));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
// merge policy only fixes segments on levels where merges
// have been triggered, so check invariants after all adds
@@ -159,7 +158,7 @@
addDoc(writer);
}
writer.commit();
- ((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();
+ ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
writer.commit();
checkInvariants(writer);
@@ -170,10 +169,9 @@
public void testMergeDocCount0() throws IOException {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
writer.setMergePolicy(new LogDocMergePolicy(writer));
- writer.setMaxBufferedDocs(10);
- writer.setMergeFactor(100);
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
for (int i = 0; i < 250; i++) {
addDoc(writer);
@@ -185,17 +183,17 @@
reader.deleteDocuments(new Term("content", "aaa"));
reader.close();
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
writer.setMergePolicy(new LogDocMergePolicy(writer));
- writer.setMaxBufferedDocs(10);
- writer.setMergeFactor(5);
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(5);
// merge factor is changed, so check invariants after all adds
for (int i = 0; i < 10; i++) {
addDoc(writer);
}
writer.commit();
- ((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();
+ ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
writer.commit();
checkInvariants(writer);
assertEquals(10, writer.maxDoc());
@@ -211,9 +209,9 @@
private void checkInvariants(IndexWriter writer) throws IOException {
_TestUtil.syncConcurrentMerges(writer);
- int maxBufferedDocs = writer.getMaxBufferedDocs();
- int mergeFactor = writer.getMergeFactor();
- int maxMergeDocs = writer.getMaxMergeDocs();
+ int maxBufferedDocs = writer.getConfig().getMaxBufferedDocs();
+ int mergeFactor = ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor();
+ int maxMergeDocs = ((LogMergePolicy) writer.getMergePolicy()).getMaxMergeDocs();
int ramSegmentCount = writer.getNumBufferedDocuments();
assertTrue(ramSegmentCount < maxBufferedDocs);
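
The rewritten checkInvariants above illustrates where each setting is now read back: flush-related values through the config, merge-related values through the merge policy. A sketch under the same assumptions (default LogMergePolicy; class name is a stand-in):

    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.LogMergePolicy;

    class ReadBackSketch {
      void printTuning(IndexWriter writer) {
        // Flush settings live on the config...
        int maxBufferedDocs = writer.getConfig().getMaxBufferedDocs();
        // ...merge settings on the merge policy.
        LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
        System.out.println("maxBufferedDocs=" + maxBufferedDocs
            + " mergeFactor=" + lmp.getMergeFactor()
            + " maxMergeDocs=" + lmp.getMaxMergeDocs());
      }
    }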
Index: src/test/org/apache/lucene/index/TestIndexWriterMerging.java
===================================================================
--- src/test/org/apache/lucene/index/TestIndexWriterMerging.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestIndexWriterMerging.java (working copy)
@@ -20,6 +20,7 @@
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.util.LuceneTestCase;
import java.io.IOException;
@@ -56,8 +57,8 @@
Directory merged = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(merged, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMergeFactor(2);
+ IndexWriter writer = new IndexWriter(merged, new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
writer.addIndexesNoOptimize(new Directory[]{indexA, indexB});
writer.optimize();
@@ -90,12 +91,13 @@
return fail;
}
- private void fillIndex(Directory dir, int start, int numDocs) throws IOException
- {
+ private void fillIndex(Directory dir, int start, int numDocs) throws IOException {
- IndexWriter writer = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMergeFactor(2);
- writer.setMaxBufferedDocs(2);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT,
+ new StandardAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(2));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
for (int i = start; i < (start + numDocs); i++)
{
Index: src/test/org/apache/lucene/index/TestIndexWriterReader.java
===================================================================
--- src/test/org/apache/lucene/index/TestIndexWriterReader.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestIndexWriterReader.java (working copy)
@@ -75,8 +75,7 @@
boolean optimize = true;
Directory dir1 = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
// create the index
createIndexNoClose(!optimize, "index1", writer);
@@ -110,8 +109,7 @@
assertEquals(0, count(new Term("id", id10), r3));
assertEquals(1, count(new Term("id", Integer.toString(8000)), r3));
- writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
writer.addDocument(doc);
@@ -138,8 +136,7 @@
boolean optimize = false;
Directory dir1 = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer.setInfoStream(infoStream);
// create the index
createIndexNoClose(!optimize, "index1", writer);
@@ -147,8 +144,7 @@
// create a 2nd index
Directory dir2 = new MockRAMDirectory();
- IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer2.setInfoStream(infoStream);
createIndexNoClose(!optimize, "index2", writer2);
writer2.close();
@@ -185,14 +181,12 @@
boolean optimize = false;
Directory dir1 = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer.setInfoStream(infoStream);
// create a 2nd index
Directory dir2 = new MockRAMDirectory();
- IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer2.setInfoStream(infoStream);
createIndexNoClose(!optimize, "index2", writer2);
writer2.close();
@@ -220,8 +214,7 @@
boolean optimize = true;
Directory dir1 = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer.setInfoStream(infoStream);
// create the index
createIndexNoClose(!optimize, "index1", writer);
@@ -259,8 +252,7 @@
writer.close();
// reopen the writer to verify the delete made it to the directory
- writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer.setInfoStream(infoStream);
IndexReader w2r1 = writer.getReader();
assertEquals(0, count(new Term("id", id10), w2r1));
@@ -274,8 +266,7 @@
int numDirs = 3;
Directory mainDir = new MockRAMDirectory();
- IndexWriter mainWriter = new IndexWriter(mainDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter mainWriter = new IndexWriter(mainDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
mainWriter.setInfoStream(infoStream);
AddDirectoriesThreads addDirThreads = new AddDirectoriesThreads(numIter, mainWriter);
addDirThreads.launchThreads(numDirs);
@@ -318,9 +309,7 @@
this.numDirs = numDirs;
this.mainWriter = mainWriter;
addDir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(addDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(2);
+ IndexWriter writer = new IndexWriter(addDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2));
for (int i = 0; i < NUM_INIT_DOCS; i++) {
Document doc = createDocument(i, "addindex", 4);
writer.addDocument(doc);
@@ -426,8 +415,7 @@
*/
public void doTestIndexWriterReopenSegment(boolean optimize) throws Exception {
Directory dir1 = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer.setInfoStream(infoStream);
IndexReader r1 = writer.getReader();
assertEquals(0, r1.maxDoc());
@@ -464,8 +452,7 @@
writer.close();
// test whether the changes made it to the directory
- writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
IndexReader w2r1 = writer.getReader();
// ensure the deletes were actually flushed to the directory
assertEquals(200, w2r1.maxDoc());
@@ -504,8 +491,7 @@
public static void createIndex(Directory dir1, String indexName,
boolean multiSegment) throws IOException {
- IndexWriter w = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
w.setMergePolicy(new LogDocMergePolicy(w));
for (int i = 0; i < 100; i++) {
w.addDocument(createDocument(i, indexName, 4));
@@ -539,8 +525,11 @@
public void testMergeWarmer() throws Exception {
Directory dir1 = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ // Enroll warmer
+ MyWarmer warmer = new MyWarmer();
+ IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setMaxBufferedDocs(2).setMergedSegmentWarmer(warmer));
writer.setInfoStream(infoStream);
// create the index
@@ -548,17 +537,13 @@
// get a reader to put writer into near real-time mode
IndexReader r1 = writer.getReader();
+
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
- // Enroll warmer
- MyWarmer warmer = new MyWarmer();
- writer.setMergedSegmentWarmer(warmer);
- writer.setMergeFactor(2);
- writer.setMaxBufferedDocs(2);
-
for (int i = 0; i < 10; i++) {
writer.addDocument(createDocument(i, "test", 4));
}
- ((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();
+ ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
assertTrue(warmer.warmCount > 0);
final int count = warmer.warmCount;
@@ -574,8 +559,7 @@
public void testAfterCommit() throws Exception {
Directory dir1 = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer.setInfoStream(infoStream);
// create the index
@@ -591,7 +575,7 @@
for (int i = 0; i < 10; i++) {
writer.addDocument(createDocument(i, "test", 4));
}
- ((ConcurrentMergeScheduler) writer.getMergeScheduler()).sync();
+ ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
IndexReader r2 = r1.reopen();
if (r2 != r1) {
@@ -607,8 +591,7 @@
// Make sure reader remains usable even if IndexWriter closes
public void testAfterClose() throws Exception {
Directory dir1 = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer.setInfoStream(infoStream);
// create the index
@@ -637,10 +620,9 @@
// Stress test reopen during addIndexes
public void testDuringAddIndexes() throws Exception {
Directory dir1 = new MockRAMDirectory();
- final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ final IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer.setInfoStream(infoStream);
- writer.setMergeFactor(2);
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
// create the index
createIndexNoClose(false, "test", writer);
@@ -715,10 +697,9 @@
// Stress test reopen during add/delete
public void testDuringAddDelete() throws Exception {
Directory dir1 = new MockRAMDirectory();
- final IndexWriter writer = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ final IndexWriter writer = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
writer.setInfoStream(infoStream);
- writer.setMergeFactor(2);
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
// create the index
createIndexNoClose(false, "test", writer);
@@ -796,8 +777,7 @@
public void testExpungeDeletes() throws Throwable {
Directory dir = new MockRAMDirectory();
- final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ final IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
@@ -821,8 +801,7 @@
public void testDeletesNumDocs() throws Throwable {
Directory dir = new MockRAMDirectory();
- final IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
+ final IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("field", "a b c", Field.Store.NO, Field.Index.ANALYZED));
Field id = new Field("id", "", Field.Store.NO, Field.Index.NOT_ANALYZED);
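
The testMergeWarmer hunk above moves warmer registration from writer.setMergedSegmentWarmer() onto the config, set before the writer is opened. A hedged sketch of that registration, not part of the patch (class name is a stand-in; the warm() body is only a suggestion of typical use):

    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;

    class WarmerSketch {
      IndexWriterConfig withWarmer(IndexWriterConfig conf) {
        return conf.setMergedSegmentWarmer(new IndexWriter.IndexReaderWarmer() {
          @Override
          public void warm(IndexReader reader) throws IOException {
            // e.g., touch terms or norms here so the first near-real-time
            // search on the newly merged segment does not pay the warm-up cost.
          }
        });
      }
    }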
Index: src/test/org/apache/lucene/index/TestLazyBug.java
===================================================================
--- src/test/org/apache/lucene/index/TestLazyBug.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestLazyBug.java (working copy)
@@ -17,16 +17,24 @@
* limitations under the License.
*/
-import org.apache.lucene.util.LuceneTestCase;
-import org.apache.lucene.analysis.Analyzer;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Random;
+import java.util.Set;
+
import org.apache.lucene.analysis.SimpleAnalyzer;
-import org.apache.lucene.document.*;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FieldSelector;
+import org.apache.lucene.document.FieldSelectorResult;
+import org.apache.lucene.document.Fieldable;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.LuceneTestCase;
-import java.util.*;
-
/**
* Test demonstrating EOF bug on the last field of the last doc
* if other docs have already been accessed.
@@ -63,11 +71,12 @@
Directory dir = new RAMDirectory();
try {
Random r = newRandom();
- Analyzer analyzer = new SimpleAnalyzer(TEST_VERSION_CURRENT);
- IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
+ LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
+ lmp.setUseCompoundFile(false);
+ lmp.setUseCompoundDocStore(false);
- writer.setUseCompoundFile(false);
-
for (int d = 1; d <= NUM_DOCS; d++) {
Document doc = new Document();
for (int f = 1; f <= NUM_FIELDS; f++ ) {
Index: src/test/org/apache/lucene/index/TestLazyProxSkipping.java
===================================================================
--- src/test/org/apache/lucene/index/TestLazyProxSkipping.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestLazyProxSkipping.java (working copy)
@@ -60,9 +60,9 @@
int numDocs = 500;
Directory directory = new SeekCountingDirectory();
- IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setUseCompoundFile(false);
- writer.setMaxBufferedDocs(10);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
for (int i = 0; i < numDocs; i++) {
Document doc = new Document();
String content;
@@ -118,7 +118,7 @@
public void testSeek() throws IOException {
Directory directory = new RAMDirectory();
- IndexWriter writer = new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
for (int i = 0; i < 10; i++) {
Document doc = new Document();
doc.add(new Field(this.field, "a b", Field.Store.YES, Field.Index.ANALYZED));
Index: src/test/org/apache/lucene/index/TestMultiLevelSkipList.java
===================================================================
--- src/test/org/apache/lucene/index/TestMultiLevelSkipList.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestMultiLevelSkipList.java (working copy)
@@ -44,8 +44,7 @@
public class TestMultiLevelSkipList extends LuceneTestCase {
public void testSimpleSkip() throws IOException {
RAMDirectory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new PayloadAnalyzer(), true,
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new PayloadAnalyzer()));
Term term = new Term("test", "a");
for (int i = 0; i < 5000; i++) {
Document d1 = new Document();
Index: src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java
===================================================================
--- src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java (working copy)
@@ -32,13 +32,12 @@
public void testIndexing() throws Exception {
Directory mainDir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(mainDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- IndexWriter.MaxFieldLength.LIMITED);
- writer.setUseCompoundFile(false);
+ IndexWriter writer = new IndexWriter(mainDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
IndexReader reader = writer.getReader(); // start pooling readers
reader.close();
- writer.setMergeFactor(2);
- writer.setMaxBufferedDocs(10);
RunThread[] indexThreads = new RunThread[4];
for (int x=0; x < indexThreads.length; x++) {
indexThreads[x] = new RunThread(x % 2, writer);
Index: src/test/org/apache/lucene/index/TestNorms.java
===================================================================
--- src/test/org/apache/lucene/index/TestNorms.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestNorms.java (working copy)
@@ -26,6 +26,7 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.DefaultSimilarity;
import org.apache.lucene.search.Similarity;
import org.apache.lucene.store.Directory;
@@ -99,9 +100,10 @@
Directory dir3 = new RAMDirectory();
createIndex(dir3);
- IndexWriter iw = new IndexWriter(dir3,anlzr,false, IndexWriter.MaxFieldLength.LIMITED);
- iw.setMaxBufferedDocs(5);
- iw.setMergeFactor(3);
+ IndexWriter iw = new IndexWriter(dir3, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.APPEND)
+ .setMaxBufferedDocs(5));
+ ((LogMergePolicy) iw.getMergePolicy()).setMergeFactor(3);
iw.addIndexesNoOptimize(new Directory[]{dir1,dir2});
iw.optimize();
iw.close();
@@ -117,9 +119,9 @@
doTestNorms(dir3);
// now with optimize
- iw = new IndexWriter(dir3,anlzr,false, IndexWriter.MaxFieldLength.LIMITED);
- iw.setMaxBufferedDocs(5);
- iw.setMergeFactor(3);
+ iw = new IndexWriter(dir3, new IndexWriterConfig(TEST_VERSION_CURRENT,
+ anlzr).setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(5));
+ ((LogMergePolicy) iw.getMergePolicy()).setMergeFactor(3);
iw.optimize();
iw.close();
verifyIndex(dir3);
@@ -143,11 +145,13 @@
}
private void createIndex(Directory dir) throws IOException {
- IndexWriter iw = new IndexWriter(dir,anlzr,true, IndexWriter.MaxFieldLength.LIMITED);
- iw.setMaxBufferedDocs(5);
- iw.setMergeFactor(3);
- iw.setSimilarity(similarityOne);
- iw.setUseCompoundFile(true);
+ IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.CREATE)
+ .setMaxBufferedDocs(5).setSimilarity(similarityOne));
+ LogMergePolicy lmp = (LogMergePolicy) iw.getMergePolicy();
+ lmp.setMergeFactor(3);
+ lmp.setUseCompoundFile(true);
+ lmp.setUseCompoundDocStore(true);
iw.close();
}
@@ -185,11 +189,13 @@
}
private void addDocs(Directory dir, int ndocs, boolean compound) throws IOException {
- IndexWriter iw = new IndexWriter(dir,anlzr,false, IndexWriter.MaxFieldLength.LIMITED);
- iw.setMaxBufferedDocs(5);
- iw.setMergeFactor(3);
- iw.setSimilarity(similarityOne);
- iw.setUseCompoundFile(compound);
+ IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, anlzr).setOpenMode(OpenMode.APPEND)
+ .setMaxBufferedDocs(5).setSimilarity(similarityOne));
+ LogMergePolicy lmp = (LogMergePolicy) iw.getMergePolicy();
+ lmp.setMergeFactor(3);
+ lmp.setUseCompoundFile(compound);
+ lmp.setUseCompoundDocStore(compound);
for (int i = 0; i < ndocs; i++) {
iw.addDocument(newDoc());
}
Index: src/test/org/apache/lucene/index/TestOmitTf.java
===================================================================
--- src/test/org/apache/lucene/index/TestOmitTf.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestOmitTf.java (working copy)
@@ -67,7 +67,7 @@
public void testOmitTermFreqAndPositions() throws Exception {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
- IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
Document d = new Document();
// this field will have Tf
@@ -113,9 +113,9 @@
public void testMixedMerge() throws Exception {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
- IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(3);
- writer.setMergeFactor(2);
+ IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
Document d = new Document();
// this field will have Tf
@@ -166,9 +166,9 @@
public void testMixedRAM() throws Exception {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
- IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(10);
- writer.setMergeFactor(2);
+ IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(10));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
Document d = new Document();
// this field will have Tf
@@ -214,10 +214,12 @@
public void testNoPrxFile() throws Throwable {
Directory ram = new MockRAMDirectory();
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
- IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMaxBufferedDocs(3);
- writer.setMergeFactor(2);
- writer.setUseCompoundFile(false);
+ IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(3));
+ LogMergePolicy lmp = (LogMergePolicy) writer.getMergePolicy();
+ lmp.setMergeFactor(2);
+ lmp.setUseCompoundFile(false);
+ lmp.setUseCompoundDocStore(false);
Document d = new Document();
Field f1 = new Field("f1", "This field has term freqs", Field.Store.NO, Field.Index.ANALYZED);
@@ -245,10 +247,10 @@
public void testBasic() throws Exception {
Directory dir = new MockRAMDirectory();
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
- IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setMergeFactor(2);
- writer.setMaxBufferedDocs(2);
- writer.setSimilarity(new SimpleSimilarity());
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer).setMaxBufferedDocs(2)
+ .setSimilarity(new SimpleSimilarity()));
+ ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(2);
StringBuilder sb = new StringBuilder(265);
Index: src/test/org/apache/lucene/index/TestParallelReader.java
===================================================================
--- src/test/org/apache/lucene/index/TestParallelReader.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestParallelReader.java (working copy)
@@ -106,7 +106,7 @@
// one document only:
Directory dir2 = new MockRAMDirectory();
- IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
Document d3 = new Document();
d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
w2.addDocument(d3);
@@ -151,13 +151,13 @@
Directory dir2 = getDir2();
// add another document to ensure that the indexes are not optimized
- IndexWriter modifier = new IndexWriter(dir1, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter modifier = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
Document d = new Document();
d.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
modifier.addDocument(d);
modifier.close();
- modifier = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ modifier = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
d = new Document();
d.add(new Field("f2", "v2", Field.Store.YES, Field.Index.ANALYZED));
modifier.addDocument(d);
@@ -170,7 +170,7 @@
assertFalse(pr.isOptimized());
pr.close();
- modifier = new IndexWriter(dir1, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ modifier = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
modifier.optimize();
modifier.close();
@@ -182,7 +182,7 @@
pr.close();
- modifier = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ modifier = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
modifier.optimize();
modifier.close();
@@ -233,7 +233,7 @@
// Fields 1-4 indexed together:
private Searcher single() throws IOException {
Directory dir = new MockRAMDirectory();
- IndexWriter w = new IndexWriter(dir, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
Document d1 = new Document();
d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -263,7 +263,7 @@
private Directory getDir1() throws IOException {
Directory dir1 = new MockRAMDirectory();
- IndexWriter w1 = new IndexWriter(dir1, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w1 = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
Document d1 = new Document();
d1.add(new Field("f1", "v1", Field.Store.YES, Field.Index.ANALYZED));
d1.add(new Field("f2", "v1", Field.Store.YES, Field.Index.ANALYZED));
@@ -278,7 +278,7 @@
private Directory getDir2() throws IOException {
Directory dir2 = new RAMDirectory();
- IndexWriter w2 = new IndexWriter(dir2, new StandardAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter w2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new StandardAnalyzer(TEST_VERSION_CURRENT)));
Document d3 = new Document();
d3.add(new Field("f3", "v1", Field.Store.YES, Field.Index.ANALYZED));
d3.add(new Field("f4", "v1", Field.Store.YES, Field.Index.ANALYZED));
Index: src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java
===================================================================
--- src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestParallelReaderEmptyIndex.java (working copy)
@@ -28,7 +28,7 @@
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.document.Field.TermVector;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.store.RAMDirectory;
@@ -47,16 +47,14 @@
*/
public void testEmptyIndex() throws IOException {
RAMDirectory rd1 = new MockRAMDirectory();
- IndexWriter iw = new IndexWriter(rd1, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
- MaxFieldLength.UNLIMITED);
+ IndexWriter iw = new IndexWriter(rd1, new IndexWriterConfig(TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
iw.close();
RAMDirectory rd2 = new MockRAMDirectory(rd1);
RAMDirectory rdOut = new MockRAMDirectory();
- IndexWriter iwOut = new IndexWriter(rdOut, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
- MaxFieldLength.UNLIMITED);
+ IndexWriter iwOut = new IndexWriter(rdOut, new IndexWriterConfig(TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(rd1,true));
pr.add(IndexReader.open(rd2,true));
@@ -80,8 +78,7 @@
public void testEmptyIndexWithVectors() throws IOException {
RAMDirectory rd1 = new MockRAMDirectory();
{
- IndexWriter iw = new IndexWriter(rd1, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
- MaxFieldLength.UNLIMITED);
+ IndexWriter iw = new IndexWriter(rd1, new IndexWriterConfig(TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("test", "", Store.NO, Index.ANALYZED,
TermVector.YES));
@@ -95,16 +92,14 @@
ir.deleteDocument(0);
ir.close();
- iw = new IndexWriter(rd1, new SimpleAnalyzer(TEST_VERSION_CURRENT), false,
- MaxFieldLength.UNLIMITED);
+ iw = new IndexWriter(rd1, new IndexWriterConfig(TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
iw.optimize();
iw.close();
}
RAMDirectory rd2 = new MockRAMDirectory();
{
- IndexWriter iw = new IndexWriter(rd2, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
- MaxFieldLength.UNLIMITED);
+ IndexWriter iw = new IndexWriter(rd2, new IndexWriterConfig(TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
iw.addDocument(doc);
iw.close();
@@ -112,8 +107,7 @@
RAMDirectory rdOut = new MockRAMDirectory();
- IndexWriter iwOut = new IndexWriter(rdOut, new SimpleAnalyzer(TEST_VERSION_CURRENT), true,
- MaxFieldLength.UNLIMITED);
+ IndexWriter iwOut = new IndexWriter(rdOut, new IndexWriterConfig(TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
ParallelReader pr = new ParallelReader();
pr.add(IndexReader.open(rd1,true));
pr.add(IndexReader.open(rd2,true));
Index: src/test/org/apache/lucene/index/TestParallelTermEnum.java
===================================================================
--- src/test/org/apache/lucene/index/TestParallelTermEnum.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestParallelTermEnum.java (working copy)
@@ -37,7 +37,7 @@
Document doc;
RAMDirectory rd1 = new RAMDirectory();
- IndexWriter iw1 = new IndexWriter(rd1, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw1 = new IndexWriter(rd1, new IndexWriterConfig(TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
doc = new Document();
doc.add(new Field("field1", "the quick brown fox jumps", Store.YES,
@@ -49,7 +49,7 @@
iw1.close();
RAMDirectory rd2 = new RAMDirectory();
- IndexWriter iw2 = new IndexWriter(rd2, new SimpleAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw2 = new IndexWriter(rd2, new IndexWriterConfig(TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT)));
doc = new Document();
doc.add(new Field("field0", "", Store.NO, Index.ANALYZED));
Index: src/test/org/apache/lucene/index/TestPayloads.java
===================================================================
--- src/test/org/apache/lucene/index/TestPayloads.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestPayloads.java (working copy)
@@ -36,6 +36,7 @@
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.RAMDirectory;
@@ -100,7 +101,7 @@
rnd = newRandom();
Directory ram = new RAMDirectory();
PayloadAnalyzer analyzer = new PayloadAnalyzer();
- IndexWriter writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(ram, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
Document d = new Document();
// this field won't have any payloads
d.add(new Field("f1", "This field has no payloads", Field.Store.NO, Field.Index.ANALYZED));
@@ -127,7 +128,8 @@
// now we add another document which has payloads for field f3 and verify if the SegmentMerger
// enabled payloads for that field
- writer = new IndexWriter(ram, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(ram, new IndexWriterConfig(TEST_VERSION_CURRENT,
+ analyzer).setOpenMode(OpenMode.CREATE));
d = new Document();
d.add(new Field("f1", "This field has no payloads", Field.Store.NO, Field.Index.ANALYZED));
d.add(new Field("f2", "This field has payloads in all docs", Field.Store.NO, Field.Index.ANALYZED));
@@ -168,7 +170,9 @@
// different tests to verify the payload encoding
private void performTest(Directory dir) throws Exception {
PayloadAnalyzer analyzer = new PayloadAnalyzer();
- IndexWriter writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, analyzer)
+ .setOpenMode(OpenMode.CREATE));
// should be in sync with value in TermInfosWriter
final int skipInterval = 16;
@@ -305,7 +309,8 @@
// test long payload
analyzer = new PayloadAnalyzer();
- writer = new IndexWriter(dir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT,
+ analyzer).setOpenMode(OpenMode.CREATE));
String singleTerm = "lucene";
d = new Document();
@@ -465,7 +470,8 @@
final ByteArrayPool pool = new ByteArrayPool(numThreads, 5);
Directory dir = new RAMDirectory();
- final IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ final IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
final String field = "test";
Thread[] ingesters = new Thread[numThreads];
Index: src/test/org/apache/lucene/index/TestSegmentTermDocs.java
===================================================================
--- src/test/org/apache/lucene/index/TestSegmentTermDocs.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestSegmentTermDocs.java (working copy)
@@ -99,8 +99,7 @@
public void testSkipTo(int indexDivisor) throws IOException {
Directory dir = new RAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Term ta = new Term("content","aaa");
for(int i = 0; i < 10; i++)
Index: src/test/org/apache/lucene/index/TestSegmentTermEnum.java
===================================================================
--- src/test/org/apache/lucene/index/TestSegmentTermEnum.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestSegmentTermEnum.java (working copy)
@@ -23,20 +23,20 @@
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.MockRAMDirectory;
-public class TestSegmentTermEnum extends LuceneTestCase
-{
+public class TestSegmentTermEnum extends LuceneTestCase {
+
Directory dir = new RAMDirectory();
- public void testTermEnum() throws IOException
- {
+ public void testTermEnum() throws IOException {
IndexWriter writer = null;
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
// ADD 100 documents with term : aaa
// add 100 documents with terms: aaa bbb
@@ -52,7 +52,7 @@
verifyDocFreq();
// merge segments by optimizing the index
- writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), false, IndexWriter.MaxFieldLength.LIMITED);
+ writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.APPEND));
writer.optimize();
writer.close();
@@ -63,7 +63,7 @@
public void testPrevTermAtEnd() throws IOException
{
Directory dir = new MockRAMDirectory();
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
addDoc(writer, "aaa bbb");
writer.close();
SegmentReader reader = SegmentReader.getOnlySegmentReader(dir);
Index: src/test/org/apache/lucene/index/TestStressIndexing.java
===================================================================
--- src/test/org/apache/lucene/index/TestStressIndexing.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestStressIndexing.java (working copy)
@@ -18,15 +18,15 @@
import org.apache.lucene.util.*;
import org.apache.lucene.store.*;
+import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.*;
-import org.apache.lucene.analysis.*;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.search.*;
import java.util.Random;
import java.io.File;
public class TestStressIndexing extends LuceneTestCase {
- private static final Analyzer ANALYZER = new SimpleAnalyzer(TEST_VERSION_CURRENT);
private Random RANDOM;
private static abstract class TimedThread extends Thread {
@@ -118,15 +118,14 @@
stress test.
*/
public void runStressTest(Directory directory, MergeScheduler mergeScheduler) throws Exception {
- IndexWriter modifier = new IndexWriter(directory, ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter modifier = new IndexWriter(directory, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.CREATE).setMaxBufferedDocs(10).setMergeScheduler(
+ mergeScheduler));
- modifier.setMaxBufferedDocs(10);
-
TimedThread[] threads = new TimedThread[4];
int numThread = 0;
- if (mergeScheduler != null)
- modifier.setMergeScheduler(mergeScheduler);
// One modifier that writes 10 docs then removes 5, over
// and over:
Index: src/test/org/apache/lucene/index/TestStressIndexing2.java
===================================================================
--- src/test/org/apache/lucene/index/TestStressIndexing2.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestStressIndexing2.java (working copy)
@@ -14,20 +14,31 @@
* limitations under the License.
*/
-import org.apache.lucene.store.*;
-import org.apache.lucene.document.*;
-import org.apache.lucene.analysis.*;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import junit.framework.Assert;
+
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.Fieldable;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MockRAMDirectory;
import org.apache.lucene.util.LuceneTestCase;
+import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util._TestUtil;
-import org.apache.lucene.util.StringHelper;
-import org.apache.lucene.search.TermQuery;
-import java.util.*;
-import java.io.IOException;
-
-import junit.framework.Assert;
-
public class TestStressIndexing2 extends LuceneTestCase {
static int maxFields=4;
static int bigFieldSize=10;
@@ -40,8 +51,8 @@
public class MockIndexWriter extends IndexWriter {
- public MockIndexWriter(Directory dir, Analyzer a, boolean create, IndexWriter.MaxFieldLength mfl) throws IOException {
- super(dir, a, create, mfl);
+ public MockIndexWriter(Directory dir, IndexWriterConfig conf) throws IOException {
+ super(dir, conf);
}
@Override
@@ -123,9 +134,13 @@
public DocsAndWriter indexRandomIWReader(int nThreads, int iterations, int range, Directory dir) throws IOException, InterruptedException {
Map<String,Document> docs = new HashMap<String,Document>();
- IndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- w.setUseCompoundFile(false);
-
+ IndexWriter w = new MockIndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE).setRAMBufferSizeMB(
+ 0.1).setMaxBufferedDocs(maxBufferedDocs));
+ LogMergePolicy lmp = (LogMergePolicy) w.getMergePolicy();
+ lmp.setUseCompoundFile(false);
+ lmp.setUseCompoundDocStore(false);
+ lmp.setMergeFactor(mergeFactor);
/***
w.setMaxMergeDocs(Integer.MAX_VALUE);
w.setMaxFieldLength(10000);
@@ -133,11 +148,6 @@
w.setMergeFactor(10);
***/
- // force many merges
- w.setMergeFactor(mergeFactor);
- w.setRAMBufferSizeMB(.1);
- w.setMaxBufferedDocs(maxBufferedDocs);
-
threads = new IndexingThread[nThreads];
for (int i=0; i<threads.length; i++) {
@@ ... @@
public Map<String,Document> indexRandom(int nThreads, int iterations, int range, Directory dir) throws IOException, InterruptedException {
Map<String,Document> docs = new HashMap<String,Document>();
for(int iter=0;iter<3;iter++) {
- IndexWriter w = new MockIndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
- w.setUseCompoundFile(false);
+ IndexWriter w = new MockIndexWriter(dir, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setOpenMode(OpenMode.CREATE)
+ .setRAMBufferSizeMB(0.1).setMaxBufferedDocs(maxBufferedDocs));
+ LogMergePolicy lmp = (LogMergePolicy) w.getMergePolicy();
+ lmp.setUseCompoundFile(false);
+ lmp.setUseCompoundDocStore(false);
+ lmp.setMergeFactor(mergeFactor);
- // force many merges
- w.setMergeFactor(mergeFactor);
- w.setRAMBufferSizeMB(.1);
- w.setMaxBufferedDocs(maxBufferedDocs);
-
threads = new IndexingThread[nThreads];
for (int i=0; i<threads.length; i++) {
@@ ... @@
public static void indexSerial(Map<String,Document> docs, Directory dir) throws IOException {
- IndexWriter w = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
// index all docs in a single thread
Iterator<Document> iter = docs.values().iterator();
@@ -409,7 +419,6 @@
Fieldable f2 = ff2.get(i);
if (f1.isBinary()) {
assert(f2.isBinary());
- //TODO
} else {
String s1 = f1.stringValue();
String s2 = f2.stringValue();
Index: src/test/org/apache/lucene/index/TestTermVectorsReader.java
===================================================================
--- src/test/org/apache/lucene/index/TestTermVectorsReader.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestTermVectorsReader.java (working copy)
@@ -92,8 +92,9 @@
}
Arrays.sort(tokens);
- IndexWriter writer = new IndexWriter(dir, new MyAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
- writer.setUseCompoundFile(false);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MyAnalyzer()));
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundFile(false);
+ ((LogMergePolicy) writer.getMergePolicy()).setUseCompoundDocStore(false);
Document doc = new Document();
for(int i=0;i<testFields.length;i++) {
Index: src/test/org/apache/lucene/index/TestTransactionRollback.java
===================================================================
--- src/test/org/apache/lucene/index/TestTransactionRollback.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestTransactionRollback.java (working copy)
@@ ... @@
Map<String,String> data = new HashMap<String,String>();
data.put("index", "Rolled back to 1-"+id);
w.commit(data);
@@ -127,7 +127,7 @@
//Build index, of records 1 to 100, committing after each batch of 10
IndexDeletionPolicy sdp=new KeepAllDeletionPolicy();
- IndexWriter w=new IndexWriter(dir,new WhitespaceAnalyzer(TEST_VERSION_CURRENT),sdp,MaxFieldLength.UNLIMITED);
+ IndexWriter w=new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setIndexDeletionPolicy(sdp));
for(int currentRecordId=1;currentRecordId<=100;currentRecordId++) {
Document doc=new Document();
doc.add(new Field(FIELD_RECORD_ID,""+currentRecordId,Field.Store.YES,Field.Index.ANALYZED));
@@ -195,9 +195,8 @@
for(int i=0;i<2;i++) {
// Unless you specify a prior commit point, rollback
// should not work:
- new IndexWriter(dir,new WhitespaceAnalyzer(TEST_VERSION_CURRENT),
- new DeleteLastCommitPolicy(),
- MaxFieldLength.UNLIMITED).close();
+ new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
+ .setIndexDeletionPolicy(new DeleteLastCommitPolicy())).close();
IndexReader r = IndexReader.open(dir, true);
assertEquals(100, r.numDocs());
r.close();
Index: src/test/org/apache/lucene/index/TestTransactions.java
===================================================================
--- src/test/org/apache/lucene/index/TestTransactions.java (revision 921633)
+++ src/test/org/apache/lucene/index/TestTransactions.java (working copy)
@@ -19,13 +19,17 @@
import java.io.IOException;
import java.util.Random;
-import org.apache.lucene.store.*;
-import org.apache.lucene.util.*;
-import org.apache.lucene.analysis.*;
-import org.apache.lucene.document.*;
-public class TestTransactions extends LuceneTestCase
-{
+import org.apache.lucene.analysis.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.MockRAMDirectory;
+import org.apache.lucene.util.English;
+import org.apache.lucene.util.LuceneTestCase;
+
+public class TestTransactions extends LuceneTestCase {
+
private Random RANDOM;
private static volatile boolean doFail;
@@ -88,17 +92,15 @@
@Override
public void doWork() throws Throwable {
- IndexWriter writer1 = new IndexWriter(dir1, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
- writer1.setMaxBufferedDocs(3);
- writer1.setMergeFactor(2);
- ((ConcurrentMergeScheduler) writer1.getMergeScheduler()).setSuppressExceptions();
+ IndexWriter writer1 = new IndexWriter(dir1, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(3));
+ ((LogMergePolicy) writer1.getMergePolicy()).setMergeFactor(2);
+ ((ConcurrentMergeScheduler) writer1.getConfig().getMergeScheduler()).setSuppressExceptions();
- IndexWriter writer2 = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
// Intentionally use different params so flush/merge
// happen @ different times
- writer2.setMaxBufferedDocs(2);
- writer2.setMergeFactor(3);
- ((ConcurrentMergeScheduler) writer2.getMergeScheduler()).setSuppressExceptions();
+ IndexWriter writer2 = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(2));
+ ((LogMergePolicy) writer2.getMergePolicy()).setMergeFactor(3);
+ ((ConcurrentMergeScheduler) writer2.getConfig().getMergeScheduler()).setSuppressExceptions();
update(writer1);
update(writer2);
@@ -178,7 +180,7 @@
}
public void initIndex(Directory dir) throws Throwable {
- IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
for(int j=0; j<7; j++) {
Document d = new Document();
int n = RANDOM.nextInt();
Index: src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java
===================================================================
--- src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java (revision 921633)
+++ src/test/org/apache/lucene/queryParser/TestMultiFieldQueryParser.java (working copy)
@@ -28,6 +28,7 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
@@ -283,7 +284,7 @@
public void testStopWordSearching() throws Exception {
Analyzer analyzer = new StandardAnalyzer(TEST_VERSION_CURRENT);
Directory ramDir = new RAMDirectory();
- IndexWriter iw = new IndexWriter(ramDir, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
Document doc = new Document();
doc.add(new Field("body", "blah the footest blah", Field.Store.NO, Field.Index.ANALYZED));
iw.addDocument(doc);
Index: src/test/org/apache/lucene/queryParser/TestQueryParser.java
===================================================================
--- src/test/org/apache/lucene/queryParser/TestQueryParser.java (revision 921633)
+++ src/test/org/apache/lucene/queryParser/TestQueryParser.java (working copy)
@@ -46,6 +46,7 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.BooleanQuery;
@@ -472,8 +473,7 @@
public void testFarsiRangeCollating() throws Exception {
RAMDirectory ramDir = new RAMDirectory();
- IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("content","\u0633\u0627\u0628",
Field.Store.YES, Field.Index.NOT_ANALYZED));
@@ -882,7 +882,7 @@
public void testLocalDateFormat() throws IOException, ParseException {
RAMDirectory ramDir = new RAMDirectory();
- IndexWriter iw = new IndexWriter(ramDir, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter iw = new IndexWriter(ramDir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
addDateDoc("a", 2005, 12, 2, 10, 15, 33, iw);
addDateDoc("b", 2005, 12, 4, 22, 15, 00, iw);
iw.close();
@@ -1028,7 +1028,7 @@
public void testPositionIncrements() throws Exception {
Directory dir = new MockRAMDirectory();
Analyzer a = new StandardAnalyzer(TEST_VERSION_CURRENT);
- IndexWriter w = new IndexWriter(dir, a, IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, a));
Document doc = new Document();
doc.add(new Field("f", "the wizard of ozzy", Field.Store.NO, Field.Index.ANALYZED));
w.addDocument(doc);
Index: src/test/org/apache/lucene/search/BaseTestRangeFilter.java
===================================================================
--- src/test/org/apache/lucene/search/BaseTestRangeFilter.java (revision 921633)
+++ src/test/org/apache/lucene/search/BaseTestRangeFilter.java (working copy)
@@ -24,6 +24,8 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.RAMDirectory;
public class BaseTestRangeFilter extends LuceneTestCase {
@@ -96,8 +98,9 @@
try {
/* build an index */
- IndexWriter writer = new IndexWriter(index.index, new SimpleAnalyzer(TEST_VERSION_CURRENT), T,
- IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer = new IndexWriter(index.index, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new SimpleAnalyzer(TEST_VERSION_CURRENT))
+ .setOpenMode(OpenMode.CREATE));
for (int d = minId; d <= maxId; d++) {
Document doc = new Document();
Index: src/test/org/apache/lucene/search/QueryUtils.java
===================================================================
--- src/test/org/apache/lucene/search/QueryUtils.java (revision 921633)
+++ src/test/org/apache/lucene/search/QueryUtils.java (working copy)
@@ -12,8 +12,8 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MultiReader;
-import org.apache.lucene.index.IndexWriter.MaxFieldLength;
import org.apache.lucene.store.RAMDirectory;
import static org.apache.lucene.util.LuceneTestCaseJ4.TEST_VERSION_CURRENT;
@@ -200,8 +200,8 @@
private static RAMDirectory makeEmptyIndex(final int numDeletedDocs)
throws IOException {
RAMDirectory d = new RAMDirectory();
- IndexWriter w = new IndexWriter(d, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true,
- MaxFieldLength.LIMITED);
+ IndexWriter w = new IndexWriter(d, new IndexWriterConfig(
+ TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
for (int i = 0; i < numDeletedDocs; i++) {
w.addDocument(new Document());
}
Index: src/test/org/apache/lucene/search/TestBoolean2.java
===================================================================
--- src/test/org/apache/lucene/search/TestBoolean2.java (revision 921633)
+++ src/test/org/apache/lucene/search/TestBoolean2.java (working copy)
@@ -24,6 +24,7 @@
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.queryParser.ParseException;
@@ -50,7 +51,7 @@
protected void setUp() throws Exception {
super.setUp();
RAMDirectory directory = new RAMDirectory();
- IndexWriter writer= new IndexWriter(directory, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), true, IndexWriter.MaxFieldLength.LIMITED);
+ IndexWriter writer= new IndexWriter(directory, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
for (int i = 0; i < docFields.length; i++) {
Document doc = new Document();
doc.add(new Field(field, docFields[i], Field.Store.NO, Field.Index.ANALYZED));
@@ -67,14 +68,14 @@
int docCount = 0;
do {
final Directory copy = new RAMDirectory(dir2);
- IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter w = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
w.addIndexesNoOptimize(new Directory[] {copy});
docCount = w.maxDoc();
w.close();
mulFactor *= 2;
} while(docCount < 3000);
- IndexWriter w = new IndexWriter(dir2, new WhitespaceAnalyzer(TEST_VERSION_CURRENT), IndexWriter.MaxFieldLength.UNLIMITED);
+ IndexWriter w = new IndexWriter(dir2, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)));
Document doc = new Document();
doc.add(new Field("field2", "xxx", Field.Store.NO, Field.Index.ANALYZED));
for(int i=0;i<NUM_EXTRA_DOCS/2;i++) {