private void RecoverError(Lexeme z)
{
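    //the error candidates are the heads that failed to act on the current
    //lexeme z; each one is expanded with two kinds of repair: deleting z
    //(option 1) and inserting a missing token in front of z (option 2)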
    List<ParserHead> shiftedHeads = m_shiftedHeads;
    m_heads.Clear();

    int errorHeadCount = m_errorCandidates.Count;
    Debug.Assert(errorHeadCount > 0);

    for (int i = 0; i < errorHeadCount; i++)
    {
        var head = m_errorCandidates[i];

        //option 1: remove
        //remove the current token and continue; the end-of-stream lexeme
        //must not be removed, or parsing could never terminate
        if (!z.IsEndOfStream)
        {
            var deleteHead = head.Clone();

            deleteHead.IncreaseErrorRecoverLevel();
            deleteHead.AddError(new ErrorRecord(m_errorDef.TokenUnexpectedId, z.Value.Span) { ErrorArgument = z.Value });

            //removing z counts as consuming it, so the head joins the shifted heads
            shiftedHeads.Add(deleteHead);
        }

        //option 2: insert
        //try inserting every token that can be shifted, then continue
        Queue<ParserHead> recoverQueue = new Queue<ParserHead>();
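
        //each head in this queue still needs to shift the inserted token;
        //the reduce phase below re-enqueues heads until that shift succeeds
        //or no further action is possible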
        for (int j = 0; j < m_transitions.TokenCount - 1; j++)
        {
            recoverQueue.Enqueue(head);

            while (recoverQueue.Count > 0)
            {
                var recoverHead = recoverQueue.Dequeue();
                int recoverStateNumber = recoverHead.TopStackStateIndex;
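
                //look up the shift actions for candidate token j in the
                //head's current top state; each action spawns one new head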
                var recoverShift = m_transitions.GetShift(recoverStateNumber, j);

                while (recoverShift != null)
                {
                    var insertHead = recoverHead.Clone();

                    //synthesize a virtual lexeme for token j at the current position
                    var insertLexeme = z.GetErrorCorrectionLexeme(j, m_transitions.GetTokenDescription(j));
                    insertHead.Shift(insertLexeme, recoverShift.Value);

                    insertHead.IncreaseErrorRecoverLevel();
                    insertHead.AddError(new ErrorRecord(m_errorDef.TokenMissingId, z.Value.Span) { ErrorArgument = insertLexeme.Value });

                    //the head still has to consume z itself, so it rejoins the active heads
                    m_heads.Add(insertHead);

                    recoverShift = recoverShift.GetNext();
                }
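
                //reduce actions enabled by candidate token j expose a new top
                //state, so reduced heads re-enter the queue to retry the shift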
                var recoverReduce = m_transitions.GetReduce(recoverStateNumber, j);

                while (recoverReduce != null)
                {
                    int productionIndex = recoverReduce.Value;
                    IProduction production = m_transitions.NonTerminals[productionIndex];

                    var reducedHead = recoverHead.Clone();
                    reducedHead.Reduce(production, m_reducer, z);

                    //add back to the queue; the head is not done until the inserted token is shifted
                    m_recoverReducedHeads.Add(reducedHead);

                    //get the next reduce action
                    recoverReduce = recoverReduce.GetNext();
                }
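
                //clean the reduced heads before re-enqueueing them; the cleaner
                //presumably merges equivalent heads so the recovery search stays bounded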
                if (m_recoverReducedHeads.Count > 0)
                {
                    m_tempHeads.Clear();
                    m_cleaner.CleanHeads(m_recoverReducedHeads, m_tempHeads);
                    m_recoverReducedHeads.Clear();

                    foreach (var recoveredHead in m_tempHeads)
                    {
                        recoverQueue.Enqueue(recoveredHead);
                    }
                }
            }
        }
    }
}
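
//Illustrative example (hypothetical input): parsing "f(a b" with a comma
//missing between the arguments leaves every head unable to shift 'b', so
//those heads become error candidates. Option 1 produces a head that skips
//'b' and records a TokenUnexpected error; option 2 tries every terminal
//and finds that an inserted ',' can be shifted, producing a head with a
//TokenMissing error that then consumes 'b' normally. Both repairs raise
//the error recover level, which later stages can use to prefer the head
//with the fewest corrections.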