RTools.Util.StreamTokenizer.TokenizeString C# (CSharp) Method

TokenizeString() public method

Parse all tokens from the specified string and put them into the supplied List.
public TokenizeString ( string str, List<Token> tokens ) : bool
str string The string to tokenize.
tokens List<Token> The List to put tokens in.
return bool
        public bool TokenizeString(string str, List<Token> tokens)
        {
            textReader = new StringReader(str);
            return (Tokenize(tokens));
        }
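
A minimal call pattern, distilled from the self-test below. The RTools.Util namespace and the Token type are taken from that example; the input string and the use of the tokenizer's default settings (the self-test calls Settings.SetupForCodeParse() instead) are assumptions for illustration only:

        using System;
        using System.Collections.Generic;
        using RTools.Util;

        class TokenizeStringDemo
        {
            static void Main()
            {
                // Tokenizer with default settings; the self-test below shows
                // Settings.SetupForCodeParse() being applied before parsing.
                StreamTokenizer tokenizer = new StreamTokenizer();
                List<Token> tokens = new List<Token>();

                // TokenizeString returns false if the input could not be parsed.
                if (!tokenizer.TokenizeString("x = 1 + 2;", tokens))
                {
                    Console.WriteLine("Unable to parse into token list.");
                    return;
                }

                // Each parsed token is appended to the supplied list.
                foreach (Token t in tokens)
                    Console.WriteLine("Token = '{0}'", t);
            }
        }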

Usage Example

Code Example #1
        // ---------------------------------------------------------------------
        #region TestSelf
        // ---------------------------------------------------------------------
        /// <summary>
        /// Simple self test.  See StreamTokenizerTestCase for full
        /// tests.
        /// </summary>
        /// <returns>bool - true for success, false for failure.</returns>
        public static bool TestSelf()
        {
            Logger log = new Logger("testSelf");
            log.Verbosity = VerbosityLevel.Debug;
            log.Info("Starting...");
            string testString;
            List<Token> tokens;

            // setup tokenizer
            StreamTokenizer tokenizer = new StreamTokenizer();
            tokenizer.Settings.SetupForCodeParse();
            tokenizer.Verbosity = VerbosityLevel.Debug;

            //
            // try string parse
            //
            log.Write("--------------------------------------------------------\n");
            log.Info("string parse:");
            log.Write("--------------------------------------------------------\n");
            tokens = new List<Token>();
            testString = "-1.2ej";
            tokenizer.Settings.DoUntermCheck = false;
            tokenizer.Settings.GrabWhitespace = false;

            if (!tokenizer.TokenizeString(testString, tokens))
            {
                log.Error("Unable to parse into token vector.");
                return (false);
            }

            foreach (Token t in tokens) log.Info("Token = '{0}'", t.ToString());
            tokens = new List<Token>();

            //
            // try NextToken style
            //
            //			log.Write("--------------------------------------------------------\n");
            //			log.Info("NextToken use");
            //			log.Write("--------------------------------------------------------\n");
            //string fileName = "st-testSelf.tmp";
            //testString = "this is a simple string";
            //tokenizer.TextReader = new StringReader(testString);
            //tokenizer.TextReader = File.OpenText(fileName);
            //Token token;
            //while (tokenizer.NextToken(out token)) log.Info("Token = '{0}'", token);

            //
            // try TokenizeFile
            //
            log.Write("--------------------------------------------------------\n");
            log.Info("Tokenize missing file");
            log.Write("--------------------------------------------------------\n");
            string nonExistentFileName = "ThisFile better not exist";
            bool caughtIt = false;
            try
            {
                tokenizer.TokenizeFile(nonExistentFileName);
            }
            catch (FileNotFoundException e)
            {
                log.Info("Correctly caught exception: {0}: {1}", e.GetType().ToString(), e.Message);
                caughtIt = true;
            }
            if (!caughtIt)
            {
                log.Error("Didn't get a file not found exception from TokenizeFile.");
                return (false);
            }

            //
            // test line numbers in tokens
            //

            // done
            log.Info("Done.");
            return (true);
        }