Assembler.AssemblyTokenizer.ScanSource C# (CSharp) Method

ScanSource() private method

private void ScanSource ( TokenList<BasicToken> sourceTokens )
sourceTokens TokenList<BasicToken>
return void
        private void ScanSource(TokenList<BasicToken> sourceTokens)
        {
            for (var i = 0; sourceTokens[i].Type != BasicTokenType.EndOfFile; i++)
            {
                var token = sourceTokens[i];

                switch (token.Type)
                {
                    case BasicTokenType.Word:
                        {
                            // Instruction mnemonics that are emitted as Keyword tokens (matched case-insensitively).
                            HashSet<string> opcodes = new HashSet<string>
                            {
                                "set", "add", "sub", "mul", "div", "mod", "inc", "dec", "not", "and", "or", "xor", "shl",
                                "shr", "push", "pop", "jmp", "call", "ret", "in", "out", "cmp", "jz", "jnz", "je", "ja",
                                "jb", "jae", "jbe", "jne"
                            };

                            if (opcodes.Contains(token.Value.ToLower()))
                                tokens.Add(new Token(TokenType.Keyword, token.Value.ToLower(), token.Line));
                            else
                            {
                                if (defines.ContainsKey(token.Value))
                                {
                                    List<Token> define = defines[token.Value];
                                    foreach (Token defineToken in define)
                                        tokens.Add(new Token(defineToken.Type, defineToken.Value, defineToken.Line));
                                }
                                else
                                    tokens.Add(new Token(TokenType.Word, token.Value, token.Line));
                            }
                            break;
                        }

                    case BasicTokenType.Delimiter:
                        {
                            // Single-character delimiters that map directly onto token types.
                            Dictionary<string, TokenType> delimiters = new Dictionary<string, TokenType>
                            {
                                { ",", TokenType.Comma },
                                { "[", TokenType.OpenBracket },
                                { "]", TokenType.CloseBracket },
                                { "(", TokenType.OpenParentheses },
                                { ")", TokenType.CloseParentheses },
                                { ".", TokenType.Period },
                                { "+", TokenType.Add },
                                { "-", TokenType.Subtract },
                                { "*", TokenType.Multiply },
                                { "/", TokenType.Divide },
                                { "%", TokenType.Modulo },
                                { "~", TokenType.BitwiseNot },
                                { "&", TokenType.BitwiseAnd },
                                { "|", TokenType.BitwiseOr },
                                { "^", TokenType.BitwiseXor }
                            };

                            if (delimiters.ContainsKey(token.Value))
                            {
                                tokens.Add(new Token(delimiters[token.Value], token.Value, token.Line));
                                break;
                            }

                            if (token.Value == ":" && tokens.Count > 0)
                            {
                                var last = tokens[tokens.Count - 1];
                                if (last.Type == TokenType.Word)
                                {
                                    tokens.RemoveAt(tokens.Count - 1);
                                    tokens.Add(new Token(TokenType.Label, last.Value, last.Line));
                                    break;
                                }
                            }

                            if (token.Value == "#")
                            {
                                token = sourceTokens[++i];
                                switch (token.Value)
                                {
                                    case "include":
                                        {
                                            BasicToken filenameToken = sourceTokens[++i];
                                            string includeSource;

                                            try
                                            {
                                                includeSource = File.ReadAllText(filenameToken.Value);
                                            }
                                            catch (Exception)
                                            {
                                                throw new AssemblerException(String.Format("Cannot open included file \"{0}\" at {2}:{1}.",
                                                    filenameToken.Value, filenameToken.Line, filenameToken.Filename));
                                            }

                                            var tokenizer = new Tokenizer(includeSource);
                                            tokenizer.Scan();

                                            // Recursively scan the included file's tokens so they are spliced into this stream.
                                            ScanSource(tokenizer.Tokens);
                                            break;
                                        }

                                    case "define":
                                        {
                                            List<Token> defineTokens = new List<Token>();
                                            BasicToken name = sourceTokens[++i];

                                            // Collect the remainder of the directive's line as the macro body,
                                            // checking the index against Count before dereferencing so the scan
                                            // cannot run past the end of the token list.
                                            while (++i < sourceTokens.Count && sourceTokens[i].Line == name.Line)
                                            {
                                                defineTokens.Add(new Token(TokenType.Number, sourceTokens[i].Value, sourceTokens[i].Line));
                                            }

                                            defines.Add(name.Value, defineTokens);
                                            --i;
                                            break;
                                        }

                                    default:
                                        throw new AssemblerException(String.Format("Unexpected preprocessor directive \"{0}\".", token.Value));
                                }
                                break;
                            }

                            throw new AssemblerException(String.Format("Unexpected delimiter '{0}'", token.Value));
                        }

                    case BasicTokenType.Number:
                        tokens.Add(new Token(TokenType.Number, token.Value, token.Line));
                        break;

                    case BasicTokenType.String:
                        tokens.Add(new Token(TokenType.String, token.Value, token.Line));
                        break;

                    default:
                        throw new AssemblerException(String.Format("Unhandled BasicToken {0}", token.Type));
                }
            }
        }
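
For context, here is a minimal usage sketch. It is hypothetical driver code, not part of the listed class: it assumes ScanSource is called from inside AssemblyTokenizer after a Tokenizer pass has produced the BasicToken stream, reusing the Tokenizer constructor, Scan(), and Tokens members that appear in the #include branch above. The Tokenize method name and the sample source in the comment are illustrative only.

        // Hypothetical caller inside AssemblyTokenizer. Only Tokenizer, Scan(),
        // Tokens, and ScanSource() come from the listing above; the rest is assumed.
        public void Tokenize(string source)
        {
            // Example input the scanner handles (defines, labels, opcodes, delimiters):
            //   #define LIMIT 10
            //   start:
            //       set a, LIMIT
            //       jmp start
            var tokenizer = new Tokenizer(source);   // same pre-pass used for #include files
            tokenizer.Scan();                        // produces a TokenList<BasicToken> ending in EndOfFile
            ScanSource(tokenizer.Tokens);            // fills the private `tokens` list with assembler Tokens
        }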