// Verifies that term-vector offsets/positions are recorded correctly when the
// same CachingTokenFilter-backed Field instance is added to a document twice:
// the second occurrence must get fresh (shifted) offsets, not a replay of the
// first occurrence's offsets.
public virtual void TestEndOffsetPositionWithCachingTokenFilter()
{
Directory dir = NewDirectory();
Analyzer analyzer = new MockAnalyzer(Random());
IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer));
Document doc = new Document();
// Captures any IOException from the try block so the finally clause can close
// the stream without masking it (Lucene's CloseWhileHandlingException idiom).
IOException priorException = null;
TokenStream stream = analyzer.TokenStream("field", new StringReader("abcd "));
try
{
stream.Reset(); // TODO: weird to reset before wrapping with CachingTokenFilter... correct?
// Wrap so the token stream can be consumed twice (once per Field occurrence below).
TokenStream cachedStream = new CachingTokenFilter(stream);
// Term vectors with positions and offsets are required for the per-occurrence
// offset assertions at the end of the test.
FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
customType.StoreTermVectors = true;
customType.StoreTermVectorPositions = true;
customType.StoreTermVectorOffsets = true;
Field f = new Field("field", cachedStream, customType);
// Add the SAME field instance twice — this is the scenario under test.
doc.Add(f);
doc.Add(f);
w.AddDocument(doc);
}
catch (IOException e)
{
priorException = e;
}
finally
{
// Close the stream; if closing also throws, the original exception wins.
IOUtils.CloseWhileHandlingException(priorException, stream);
}
w.Dispose();
IndexReader r = DirectoryReader.Open(dir);
// Pull the term vector for doc 0 and walk to the single token ("abcd").
TermsEnum termsEnum = r.GetTermVectors(0).Terms("field").Iterator(null);
Assert.IsNotNull(termsEnum.Next());
DocsAndPositionsEnum dpEnum = termsEnum.DocsAndPositions(null, null);
// The term appears once per field occurrence -> total frequency of 2.
Assert.AreEqual(2, termsEnum.TotalTermFreq());
Assert.IsTrue(dpEnum.NextDoc() != DocIdSetIterator.NO_MORE_DOCS);
// First occurrence: "abcd" spans offsets [0, 4) in the original "abcd " text.
dpEnum.NextPosition();
Assert.AreEqual(0, dpEnum.StartOffset());
Assert.AreEqual(4, dpEnum.EndOffset());
// Second occurrence: offsets shifted past the first field instance's text
// (start 8 = 4 + the analyzer's offset gap, presumably — determined by
// MockAnalyzer's offset-gap behavior, not visible here).
dpEnum.NextPosition();
Assert.AreEqual(8, dpEnum.StartOffset());
Assert.AreEqual(12, dpEnum.EndOffset());
// No further documents contain this term.
Assert.AreEqual(DocIdSetIterator.NO_MORE_DOCS, dpEnum.NextDoc());
r.Dispose();
dir.Dispose();
}