public virtual void TestPositionIncrements()
{
    SlowSynonymMap synonyms = new SlowSynonymMap();
    const bool keepOrig = false;
    const bool mergeExisting = true;

    // A generated token must inherit the position increment of the token it replaces,
    // both for a standalone token (posInc=5) and for a stacked token (posInc=0).
    synonyms.Add(Strings("a"), Tokens("aa"), keepOrig, mergeExisting);
    AssertTokenizesTo(synonyms, Tokens("a,5"), new string[] { "aa" }, new int[] { 5 });
    AssertTokenizesTo(synonyms, Tokens("b,1 a,0"), new string[] { "b", "aa" }, new int[] { 1, 0 });

    // The first replacement token's own increment (100 here) is discarded — the
    // original token's increment always wins for the first generated token.
    synonyms.Add(Strings("b"), Tokens("bb,100"), keepOrig, mergeExisting);
    AssertTokenizesTo(synonyms, Tokens("b,5"), new string[] { "bb" }, new int[] { 5 });
    AssertTokenizesTo(synonyms, Tokens("c,1 b,0"), new string[] { "c", "bb" }, new int[] { 1, 0 });

    // Tokens generated after the first keep their own increments (c2 stays at 2).
    synonyms.Add(Strings("c"), Tokens("cc,100 c2,2"), keepOrig, mergeExisting);
    AssertTokenizesTo(synonyms, Tokens("c,5"), new string[] { "cc", "c2" }, new int[] { 5, 2 });
    AssertTokenizesTo(synonyms, Tokens("d,1 c,0"), new string[] { "d", "cc", "c2" }, new int[] { 1, 0, 2 });
}