/// <summary>
/// One-time test setup. Builds two indexes:
/// (1) a small index containing one document per entry in <c>DocFields</c>,
///     searched via <c>Searcher</c>/<c>LittleReader</c>, and
/// (2) a big index (<c>Dir2</c>) produced by repeatedly adding a copy of
///     itself until it holds at least 3000 documents (tracking the growth
///     factor in <c>MulFactor</c>), plus <c>NUM_EXTRA_DOCS</c> extra
///     documents in "field2", searched via <c>BigSearcher</c>/<c>Reader</c>.
/// </summary>
public void BeforeClass()
{
    // --- Small index over DocFields ---
    Directory = NewDirectory();
    RandomIndexWriter smallWriter = new RandomIndexWriter(Random(), Directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NewLogMergePolicy()));
    foreach (var content in DocFields)
    {
        Document document = new Document();
        document.Add(NewTextField(field, content, Field.Store.NO));
        smallWriter.AddDocument(document);
    }
    smallWriter.Dispose();
    LittleReader = DirectoryReader.Open(Directory);
    Searcher = NewSearcher(LittleReader);
    // this is intentionally using the baseline sim, because it compares against bigSearcher (which uses a random one)
    Searcher.Similarity = new DefaultSimilarity();

    // --- Big index: seed with a RAM copy of the small one ---
    Dir2 = new MockDirectoryWrapper(Random(), new RAMDirectory(Directory, IOContext.DEFAULT));
    // Multiply the small index until it reaches at least 3000 docs,
    // doubling on every pass.
    MulFactor = 1;
    int totalDocs = 0;
    if (VERBOSE)
    {
        Console.WriteLine("\nTEST: now copy index...");
    }
    do
    {
        if (VERBOSE)
        {
            Console.WriteLine("\nTEST: cycle...");
        }
        // Snapshot Dir2 and add it back into itself, doubling the doc count.
        Directory dirCopy = new MockDirectoryWrapper(Random(), new RAMDirectory(Dir2, IOContext.DEFAULT));
        RandomIndexWriter doublingWriter = new RandomIndexWriter(Random(), Dir2, Similarity, TimeZone);
        doublingWriter.AddIndexes(dirCopy);
        totalDocs = doublingWriter.MaxDoc();
        doublingWriter.Dispose();
        MulFactor *= 2;
    } while (totalDocs < 3000);

    // --- Append NUM_EXTRA_DOCS extra documents in "field2" ---
    RandomIndexWriter extraWriter = new RandomIndexWriter(Random(), Dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMaxBufferedDocs(TestUtil.NextInt(Random(), 50, 1000)));
    Document extraDoc = new Document();
    extraDoc.Add(NewTextField("field2", "xxx", Field.Store.NO));
    for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
    {
        extraWriter.AddDocument(extraDoc);
    }
    extraDoc = new Document();
    extraDoc.Add(NewTextField("field2", "big bad bug", Field.Store.NO));
    for (int i = 0; i < NUM_EXTRA_DOCS / 2; i++)
    {
        extraWriter.AddDocument(extraDoc);
    }
    // Grab the reader before disposing the writer (standard RandomIndexWriter usage).
    Reader = extraWriter.Reader;
    BigSearcher = NewSearcher(Reader);
    extraWriter.Dispose();
}