/// <summary>
/// Tests a <see cref="FieldCache.CacheEntry"/>[] for indication of "insane" cache usage:
/// the same reader/field pair resolving to more than one distinct cache value.
/// </summary>
/// <param name="cacheEntries">The cache entries to inspect; may be null or empty.</param>
/// <returns>
/// Zero or more <see cref="Insanity"/> objects describing suspicious cache usage;
/// an empty array when <paramref name="cacheEntries"/> is null or empty.
/// </returns>
public Insanity[] Check(params FieldCache.CacheEntry[] cacheEntries)
{
    if (null == cacheEntries || 0 == cacheEntries.Length)
    {
        return new Insanity[0];
    }
    if (EstimateRam)
    {
        // Pre-compute RAM estimates so they are available to the reports below.
        foreach (FieldCache.CacheEntry entry in cacheEntries)
        {
            entry.EstimateSize();
        }
    }

    // the indirect mapping lets MapOfSet dedup identical valIds for us
    // maps the (valId) identity hash code of cache values to
    // sets of CacheEntry instances
    MapOfSets<int, FieldCache.CacheEntry> valIdToItems = new MapOfSets<int, FieldCache.CacheEntry>(new Dictionary<int, HashSet<FieldCache.CacheEntry>>(17));
    // maps ReaderField keys to Sets of ValueIds
    MapOfSets<ReaderField, int> readerFieldToValIds = new MapOfSets<ReaderField, int>(new Dictionary<ReaderField, HashSet<int>>(17));
    // any keys that we know result in more than one valId
    ISet<ReaderField> valMismatchKeys = new HashSet<ReaderField>();

    // iterate over all the cacheEntries to get the mappings we'll need
    foreach (FieldCache.CacheEntry item in cacheEntries)
    {
        object val = item.Value;

        // It's OK to have dup entries, where one is eg
        // float[] and the other is the Bits (from
        // getDocWithField())
        if (val is Bits)
        {
            continue;
        }
        if (val is Lucene.Net.Search.FieldCache.CreationPlaceholder)
        {
            continue;
        }

        ReaderField rf = new ReaderField(item.ReaderKey, item.FieldName);
        // Use the *identity* hash (the .NET analog of Java's
        // System.identityHashCode) so that distinct cache-value instances
        // always get distinct valIds, even when the value's type overrides
        // GetHashCode with value equality. The sanity check is about object
        // identity — two equal-but-separate instances ARE the insanity we
        // want to detect, and item.Value.GetHashCode() would hide them.
        int valId = System.Runtime.CompilerServices.RuntimeHelpers.GetHashCode(val);
        // indirect mapping, so the MapOfSet will dedup identical valIds for us
        valIdToItems.Put(valId, item);
        // Put returns the set's size; more than one valId for this
        // reader/field pair means a potential mismatch to report.
        if (1 < readerFieldToValIds.Put(rf, valId))
        {
            valMismatchKeys.Add(rf);
        }
    }

    List<Insanity> insanity = new List<Insanity>(valMismatchKeys.Count * 3);
    insanity.AddRange(CheckValueMismatch(valIdToItems, readerFieldToValIds, valMismatchKeys));
    insanity.AddRange(CheckSubreaders(valIdToItems, readerFieldToValIds));
    return insanity.ToArray();
}