public virtual void TestFieldNumberGaps()
{
    // Verifies that per-segment FieldInfos may contain gaps in the field
    // numbering (a field number with no corresponding field) and that a
    // final forced merge still yields a consistent, gap-free mapping.
    int iterations = AtLeast(13);
    for (int iter = 0; iter < iterations; iter++)
    {
        Directory dir = NewDirectory();
        {
            // Segment 1: f1 -> field number 0, f2 -> field number 1.
            var writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES));
            var doc = new Document();
            doc.Add(new TextField("f1", "d1 first field", Field.Store.YES));
            doc.Add(new TextField("f2", "d1 second field", Field.Store.YES));
            writer.AddDocument(doc);
            writer.Dispose();
            var infos = new SegmentInfos();
            infos.Read(dir);
            Assert.AreEqual(1, infos.Size());
            var segment1Fields = SegmentReader.ReadFieldInfos(infos.Info(0));
            Assert.AreEqual("f1", segment1Fields.FieldInfo(0).Name);
            Assert.AreEqual("f2", segment1Fields.FieldInfo(1).Name);
        }
        {
            // Segment 2: reuses f1 (number 0), skips f2, and introduces f3
            // as number 2 — leaving a gap at field number 1.
            var writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(Random().NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
            var doc = new Document();
            doc.Add(new TextField("f1", "d2 first field", Field.Store.YES));
            doc.Add(new StoredField("f3", new byte[] { 1, 2, 3 }));
            writer.AddDocument(doc);
            writer.Dispose();
            var infos = new SegmentInfos();
            infos.Read(dir);
            Assert.AreEqual(2, infos.Size());
            var segment1Fields = SegmentReader.ReadFieldInfos(infos.Info(0));
            var segment2Fields = SegmentReader.ReadFieldInfos(infos.Info(1));
            Assert.AreEqual("f1", segment1Fields.FieldInfo(0).Name);
            Assert.AreEqual("f2", segment1Fields.FieldInfo(1).Name);
            Assert.AreEqual("f1", segment2Fields.FieldInfo(0).Name);
            Assert.IsNull(segment2Fields.FieldInfo(1));
            Assert.AreEqual("f3", segment2Fields.FieldInfo(2).Name);
        }
        {
            // Segment 3: all three fields present, so its numbering is dense.
            var writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(Random().NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
            var doc = new Document();
            doc.Add(new TextField("f1", "d3 first field", Field.Store.YES));
            doc.Add(new TextField("f2", "d3 second field", Field.Store.YES));
            doc.Add(new StoredField("f3", new byte[] { 1, 2, 3, 4, 5 }));
            writer.AddDocument(doc);
            writer.Dispose();
            var infos = new SegmentInfos();
            infos.Read(dir);
            Assert.AreEqual(3, infos.Size());
            var segment1Fields = SegmentReader.ReadFieldInfos(infos.Info(0));
            var segment2Fields = SegmentReader.ReadFieldInfos(infos.Info(1));
            var segment3Fields = SegmentReader.ReadFieldInfos(infos.Info(2));
            Assert.AreEqual("f1", segment1Fields.FieldInfo(0).Name);
            Assert.AreEqual("f2", segment1Fields.FieldInfo(1).Name);
            Assert.AreEqual("f1", segment2Fields.FieldInfo(0).Name);
            Assert.IsNull(segment2Fields.FieldInfo(1));
            Assert.AreEqual("f3", segment2Fields.FieldInfo(2).Name);
            Assert.AreEqual("f1", segment3Fields.FieldInfo(0).Name);
            Assert.AreEqual("f2", segment3Fields.FieldInfo(1).Name);
            Assert.AreEqual("f3", segment3Fields.FieldInfo(2).Name);
        }
        {
            var writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(Random().NextBoolean() ? NoMergePolicy.NO_COMPOUND_FILES : NoMergePolicy.COMPOUND_FILES));
            writer.DeleteDocuments(new Term("f1", "d1"));
            // nuke the first segment entirely so that the segment with gaps is
            // loaded first!
            writer.ForceMergeDeletes();
            writer.Dispose();
        }
        // Merge everything down to one segment; the info stream fails the
        // test if a non-bulk merge is performed. The merged segment must
        // end up with the dense f1/f2/f3 numbering.
        var mergeWriter = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetMergePolicy(new LogByteSizeMergePolicy()).SetInfoStream(new FailOnNonBulkMergesInfoStream()));
        mergeWriter.ForceMerge(1);
        mergeWriter.Dispose();
        var mergedInfos = new SegmentInfos();
        mergedInfos.Read(dir);
        Assert.AreEqual(1, mergedInfos.Size());
        var mergedFields = SegmentReader.ReadFieldInfos(mergedInfos.Info(0));
        Assert.AreEqual("f1", mergedFields.FieldInfo(0).Name);
        Assert.AreEqual("f2", mergedFields.FieldInfo(1).Name);
        Assert.AreEqual("f3", mergedFields.FieldInfo(2).Name);
        dir.Dispose();
    }
}
}