VBF.Compilers.Scanners.Lexeme.GetErrorCorrectionLexeme C# (CSharp) Method

GetErrorCorrectionLexeme() public method

public GetErrorCorrectionLexeme ( int expectedTokenIndex, string expectedValue ) : Lexeme
expectedTokenIndex int The index of the token that the parser expected at this position.
expectedValue string The text to use as the content of the synthesized lexeme.
Return Lexeme A lexeme for the expected token, with an empty span at the start of this lexeme's position.
        public Lexeme GetErrorCorrectionLexeme(int expectedTokenIndex, string expectedValue)
        {
            if (m_scannerInfo == null)
            {
                throw new InvalidOperationException("This lexeme is not initialized");
            }
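            // Look up the scanner state associated with the expected token; a negative result means the index is unknown.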
            int state = m_scannerInfo.GetStateIndex(expectedTokenIndex);
            if (state < 0) throw new ArgumentException("Expected token index is invalid", "expectedTokenIndex");

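            // Build a lexeme carrying the expected text, with an empty span anchored at the start of this lexeme's position.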
            return new Lexeme(m_scannerInfo, state, new SourceSpan(Value.Span.StartLocation, Value.Span.StartLocation), expectedValue);
        }

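A minimal direct call might look like the sketch below. It assumes an initialized lexeme current returned by the scanner; the semicolonIndex value and the ";" text are illustrative placeholders, not part of the library.

        // Hypothetical recovery step: the parser expected ';' but saw a different token.
        // Synthesize a ';' lexeme with an empty span at the current position so parsing can continue.
        int semicolonIndex = 3; // placeholder: the token index of ';' in the scanner info
        Lexeme missingSemicolon = current.GetErrorCorrectionLexeme(semicolonIndex, ";");

        // An unknown token index raises ArgumentException; an uninitialized lexeme raises InvalidOperationException.
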
Usage Example

Example #1
        private void RecoverError(Lexeme z)
        {
            List<ParserHead> shiftedHeads = m_shiftedHeads;

            m_heads.Clear();
            int errorHeadCount = m_errorCandidates.Count;

            Debug.Assert(errorHeadCount > 0);

            for (int i = 0; i < errorHeadCount; i++)
            {
                var head = m_errorCandidates[i];

                //option 1: remove
                //remove current token and continue
                if (!z.IsEndOfStream)
                {
                    var deleteHead = head.Clone();

                    deleteHead.IncreaseErrorRecoverLevel();
                    deleteHead.AddError(new ErrorRecord(m_errorDef.TokenUnexpectedId, z.Value.Span) { ErrorArgument = z.Value });

                    shiftedHeads.Add(deleteHead);
                }

                //option 2: insert
                //insert every token that could be shifted here and continue
                Queue<ParserHead> recoverQueue = new Queue<ParserHead>();

                for (int j = 0; j < m_transitions.TokenCount - 1; j++)
                {
                    recoverQueue.Enqueue(head);

                    while (recoverQueue.Count > 0)
                    {
                        var recoverHead = recoverQueue.Dequeue();
                        int recoverStateNumber = recoverHead.TopStackStateIndex;

                        var shiftLexer = m_transitions.GetLexersInShifting(recoverStateNumber);

                        int tokenIndex;
                        if (shiftLexer == null)
                        {
                            tokenIndex = z.TokenIndex;
                        }
                        else
                        {
                            tokenIndex = z.GetTokenIndex(shiftLexer.Value);
                        }

                        var recoverShifts = m_transitions.GetShift(recoverStateNumber, j);
                        var recoverShift = recoverShifts;

                        while (recoverShift != null)
                        {
                            var insertHead = recoverHead.Clone();

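                            //synthesize the missing token j and shift it so this head can continue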
                            var insertLexeme = z.GetErrorCorrectionLexeme(j, m_transitions.GetTokenDescription(j));
                            insertHead.Shift(insertLexeme, recoverShift.Value);
                            insertHead.IncreaseErrorRecoverLevel();
                            insertHead.AddError(new ErrorRecord(m_errorDef.TokenMissingId, z.Value.Span) { ErrorArgument = insertLexeme.Value });

                            m_heads.Add(insertHead);

                            recoverShift = recoverShift.GetNext();
                        }

                        var reduceLexer = m_transitions.GetLexersInReducing(recoverStateNumber);

                        if (reduceLexer == null)
                        {
                            tokenIndex = z.TokenIndex;
                        }
                        else
                        {
                            tokenIndex = z.GetTokenIndex(reduceLexer.Value);
                        }

                        var recoverReduces = m_transitions.GetReduce(recoverStateNumber, j);
                        var recoverReduce = recoverReduces;

                        while (recoverReduce != null)
                        {
                            int productionIndex = recoverReduce.Value;
                            IProduction production = m_transitions.NonTerminals[productionIndex];

                            var reducedHead = recoverHead.Clone();

                            reducedHead.Reduce(production, m_reducer, z);

                            //add back to queue, until shifted
                            m_recoverReducedHeads.Add(reducedHead);

                            //get next reduce
                            recoverReduce = recoverReduce.GetNext();
                        }

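                        //clean the reduced heads and queue them up to try shifting again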
                        if (m_recoverReducedHeads.Count > 0)
                        {
                            m_tempHeads.Clear();
                            m_cleaner.CleanHeads(m_recoverReducedHeads, m_tempHeads);
                            m_recoverReducedHeads.Clear();

                            foreach (var recoveredHead in m_tempHeads)
                            {
                                recoverQueue.Enqueue(recoveredHead);
                            }
                        }
                    }
                }
            }
        }