Example #1
        static void Main(string[] args)
        {
            //Read file
            string       sampleFileName = "First.txt";
            MyFileReader myFileReader   = new MyFileReader(sampleFileName);
            string       text           = myFileReader.ReadFile();

            //Make tokens
            Token[] tokens = new LexicalAnalysis().ProcessTextAndGenerateTokens(text);
            tokens.ToList().ForEach(t => Console.WriteLine(t.ToString()));
            Console.WriteLine("--------------------------------------------------------------------\n\n");

            //Run the syntax analysis phase
            SyntaxAnalysis       syntaxAnalysis       = new SyntaxAnalysis(tokens);
            SyntaxAnalysisResult syntaxAnalysisResult = syntaxAnalysis.Parse();

            if (!syntaxAnalysisResult.SyntaxParsed)
            {
                Console.WriteLine(syntaxAnalysisResult.Exception.Message);
            }
            else
            {
                Console.WriteLine("Syntax Analysis phase passed");
            }



            Console.Read();
        }
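Example #1 reads the source text through a MyFileReader helper whose implementation is not shown. A minimal sketch of such a helper, assuming it does nothing more than wrap System.IO.File.ReadAllText, might look like the following; the class shape is an assumption for illustration, not the project's actual code:

        // Hypothetical minimal reader matching the usage above:
        // new MyFileReader("First.txt").ReadFile()
        public class MyFileReader
        {
            private readonly string fileName;

            public MyFileReader(string fileName)
            {
                this.fileName = fileName;
            }

            // Read the whole file into a single string for the lexer.
            public string ReadFile()
            {
                return System.IO.File.ReadAllText(fileName);
            }
        }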
Example #2
        /// <summary>
        /// Normalize the JSON string: keep only the useful information and make it more compact
        /// </summary>
        public string PretreatmentProc()
        {
            string temp = "";
            //string tempWord = "";

            int i   = 0;
            int end = 0;

            while (i < originData.Length)
            {
                //Skip the useless characters: ' ', '\t', '\r', '\n'
                if (!string.IsNullOrEmpty(LexicalAnalysis.isSpecialSymbol(originData, i, ref end)))
                {
                    i = end;
                    continue;
                }

                //Skip comments
                if (!string.IsNullOrEmpty(/*tempWord = */ LexicalAnalysis.isComment(originData, i, ref end)))
                {
                    //Debug.Log(tempWord);
                    i = end;
                    continue;
                }
                temp += originData[i];
                i++;
            }

            return(temp);
        }
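Example #2 drives two static helpers, LexicalAnalysis.isSpecialSymbol and LexicalAnalysis.isComment, whose implementations are not shown. From the call sites one can only infer the convention: each returns the matched text (or an empty string) and writes the index just past the match into the ref end parameter. A minimal sketch that reproduces only that convention is given below; the bodies (plain whitespace skipping and single-line // comments) are assumptions:

        // Hypothetical minimal helpers; only the calling convention is taken
        // from the example above, the bodies are assumptions.
        public static class LexicalAnalysisSketch
        {
            // Return the run of whitespace starting at begin, or string.Empty.
            public static string isSpecialSymbol(string data, int begin, ref int end)
            {
                int i = begin;
                while (i < data.Length && (data[i] == ' ' || data[i] == '\t' || data[i] == '\r' || data[i] == '\n'))
                {
                    i++;
                }
                end = i;
                return i > begin ? data.Substring(begin, i - begin) : string.Empty;
            }

            // Return a single-line // comment starting at begin, or string.Empty.
            public static string isComment(string data, int begin, ref int end)
            {
                if (begin + 1 < data.Length && data[begin] == '/' && data[begin + 1] == '/')
                {
                    int i = begin;
                    while (i < data.Length && data[i] != '\n')
                    {
                        i++;
                    }
                    end = i;
                    return data.Substring(begin, i - begin);
                }
                return string.Empty;
            }
        }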
Example #3
        private void compileButton_Click(object sender, EventArgs e)
        {
            if (optionsTabControl.SelectedTab == editorTab)
            {
                _output            = new Output(codeTextBox.Text);
                outputTextBox.Text = _output.FormattedValue;
            }
            else
            {
                _output            = new Output(_fileText);
                outputTextBox.Text = _output.FormattedValue;
            }

            foreach (var line in _output.Value)
            {
                Cache.Cache.Populate(line);
            }

            var lexicalAnalyzer = new LexicalAnalysis();
            LexicalComponent lexicalComponent = null;

            lexicalAnalyzer.LoadNewLine();

            do
            {
                lexicalComponent = lexicalAnalyzer.BuildComponent();
                MessageBox.Show(lexicalComponent.ToString());
            } while (!lexicalComponent.Category.Equals(Category.EndOfFile));

            MessageBox.Show("Ejecución finalizada");
        }
Example #4
        public void TestFiniteStateAutomatonNL()
        {
            LexicalAnalysis lexicalAnalysis = new LexicalAnalysis();
            string          path            = Path.Combine(Environment.CurrentDirectory, "../../../CodeAnalyzer/FiniteStateAutomatons/NL.txt");

            lexicalAnalysis.LoadFiniteStateAutomatons(new string[] { path });
            Assert.AreEqual("<\"NL\",line feed>\n", lexicalAnalysis.Lexer("\n", true));
            Assert.AreEqual("<\"NL\",next line>\u0085", lexicalAnalysis.Lexer("\u0085", true));
        }
Example #5
        public void TestFiniteStateAutomatonKW()
        {
            LexicalAnalysis lexicalAnalysis = new LexicalAnalysis();
            string          path            = Path.Combine(Environment.CurrentDirectory, "../../../CodeAnalyzer/FiniteStateAutomatons/KW.txt");

            lexicalAnalysis.LoadFiniteStateAutomatons(new string[] { path });
            Assert.AreEqual("<\"KW\",while>", lexicalAnalysis.Lexer("while", true));
            Assert.AreEqual("<\"KW\",if>", lexicalAnalysis.Lexer("if", true));
        }
Example #6
        public void TestFiniteStateAutomatonWS()
        {
            LexicalAnalysis lexicalAnalysis = new LexicalAnalysis();
            string          path            = Path.Combine(Environment.CurrentDirectory, "../../../CodeAnalyzer/FiniteStateAutomatons/WS.txt");

            lexicalAnalysis.LoadFiniteStateAutomatons(new string[] { path });
            Assert.AreEqual("<\"WS\",spase>", lexicalAnalysis.Lexer(" ", true));
            Assert.AreEqual("<\"WS\",horizontal tab>", lexicalAnalysis.Lexer("\t", true));
            Assert.AreEqual("<\"WS\",vertical tab>", lexicalAnalysis.Lexer("\v", true));
        }
Example #7
        public void TestFiniteStateAutomatonOP()
        {
            LexicalAnalysis lexicalAnalysis = new LexicalAnalysis();
            string          path            = Path.Combine(Environment.CurrentDirectory, "../../../CodeAnalyzer/FiniteStateAutomatons/OP.txt");

            lexicalAnalysis.LoadFiniteStateAutomatons(new string[] { path });
            Assert.AreEqual("<\"OP\",{>", lexicalAnalysis.Lexer("{", true));
            Assert.AreEqual("<\"OP\",==>", lexicalAnalysis.Lexer("==", true));
            Assert.AreEqual("<\"OP\",>>>", lexicalAnalysis.Lexer(">>", true));
        }
Example #8
        public void TestFiniteStateAutomatonINTEGER()
        {
            LexicalAnalysis lexicalAnalysis = new LexicalAnalysis();
            string          path            = Path.Combine(Environment.CurrentDirectory, "../../../CodeAnalyzer/FiniteStateAutomatons/INTEGER.txt");

            lexicalAnalysis.LoadFiniteStateAutomatons(new string[] { path });
            Assert.AreEqual("<\"INTEGER\",100>", lexicalAnalysis.Lexer("100", true));
            Assert.AreEqual("<\"INTEGER\",-100>", lexicalAnalysis.Lexer("-100", true));

            Assert.AreEqual("error", lexicalAnalysis.Lexer("-10.1", true));
        }
Example #9
        static void Main(string[] args)
        {
            var parser          = new TextToNode();
            var lexicalAnalyzer = new LexicalAnalysis();

            while (true)
            {
                var expression = new Expression();

                Console.WriteLine("------------");
                Console.WriteLine($"テストする式を入力してください(式の例は sample と入力ください)");

                var text = Console.ReadLine();

                if (text == "sample")
                {
                    text = "[H%]*([Y]/([X]+[Y]*[Z]))*(2.0141/1.0079)";
                }

                Console.WriteLine("------------");
                Console.WriteLine($"解析する式 : {text}");
                Console.WriteLine("------------");

                var node = parser.Parse(text);

                // Lexical analysis
                lexicalAnalyzer.Perform(node);

                // Check which variable values are needed
                var attributeNames = node.GetAttributes().OrderBy(p => p);

                // Assign values to the variables
                foreach (var name in attributeNames)
                {
                    Console.Write($"{name} の値を入力 : ");
                    var value = double.Parse(Console.ReadLine());

                    var attribute = new AttributeValue(name, value);
                    expression.AddAttribute(attribute);
                }

                var result = expression.Execute(node);

                if (result != null)
                {
                    Console.WriteLine($"答え : {result}");
                }
                else
                {
                    Console.WriteLine("計算結果は表示できませんでした。式または値に誤りがあります。");
                }
            }
        }
Example #10
        public void TestFiniteStateAutomatonSTRING()
        {
            LexicalAnalysis lexicalAnalysis = new LexicalAnalysis();
            string          path            = Path.Combine(Environment.CurrentDirectory, "../../../CodeAnalyzer/FiniteStateAutomatons/STRING.txt");

            lexicalAnalysis.LoadFiniteStateAutomatons(new string[] { path });
            Assert.AreEqual("<\"STRING\",\"abc\">", lexicalAnalysis.Lexer("\"abc\"", true));
            Assert.AreEqual("<\"STRING\",\"\">", lexicalAnalysis.Lexer("\"\"", true));
            Assert.AreEqual("<\"STRING\",@\"абв\">", lexicalAnalysis.Lexer("@\"абв\"", true));

            Assert.AreEqual("error", lexicalAnalysis.Lexer("\"123", true));
        }
Example #11
        private void TestCountOfLexems(string stringToParse, int expectedNumerOfLexems)
        {
            var lexicalAnalisis = new LexicalAnalysis();

            Assert.NotNull(lexicalAnalisis);

            List <Lexem> lexems = LexicalAnalysis.AnalizeLine(stringToParse);

            Assert.NotNull(lexems);

            Assert.AreEqual(expectedNumerOfLexems, lexems.Count);
        }
Example #12
        public void TestFiniteStateAutomatonID()
        {
            LexicalAnalysis lexicalAnalysis = new LexicalAnalysis();
            string          path            = Path.Combine(Environment.CurrentDirectory, "../../../CodeAnalyzer/FiniteStateAutomatons/ID.txt");

            lexicalAnalysis.LoadFiniteStateAutomatons(new string[] { path });
            Assert.AreEqual("<\"ID\",test>", lexicalAnalysis.Lexer("test", true));
            Assert.AreEqual("<\"ID\",test1>", lexicalAnalysis.Lexer("test1", true));
            Assert.AreEqual("<\"ID\",_test>", lexicalAnalysis.Lexer("_test", true));
            Assert.AreEqual("<\"ID\",_1test>", lexicalAnalysis.Lexer("_1test", true));

            Assert.AreEqual("error", lexicalAnalysis.Lexer("1test", true));
        }
Example #13
        public void TestFiniteStateAutomatonCOMMENT()
        {
            LexicalAnalysis lexicalAnalysis = new LexicalAnalysis();
            string          path            = Path.Combine(Environment.CurrentDirectory, "../../../CodeAnalyzer/FiniteStateAutomatons/COMMENT.txt");

            lexicalAnalysis.LoadFiniteStateAutomatons(new string[] { path });
            Assert.AreEqual("<\"COMMENT\",// comment>", lexicalAnalysis.Lexer("// comment", true));
            Assert.AreEqual("<\"COMMENT\",//comment\n>", lexicalAnalysis.Lexer("//comment\n", true));
            Assert.AreEqual("<\"COMMENT\",//comment///*/*/\n>", lexicalAnalysis.Lexer("//comment///*/*/\n", true));
            Assert.AreEqual("<\"COMMENT\",/*comment///**/>", lexicalAnalysis.Lexer("/*comment///**/", true));

            Assert.AreEqual("error", lexicalAnalysis.Lexer("// comment\n no comment", true));
            Assert.AreEqual("error", lexicalAnalysis.Lexer("/*comment///*/*/", true));
        }
Example #14
        public void TestLexicalAnalisis()
        {
            var lexicalAnalisis = new LexicalAnalysis();

            Assert.NotNull(lexicalAnalisis);

            var input = String.Empty;

            List <Lexem> lexems = LexicalAnalysis.AnalizeLine(input);

            Assert.NotNull(lexems);

            Assert.AreEqual(0, lexems.Count);
        }
Example #15
        // Constructor
        public MainPage()
        {
            InitializeComponent();


            string bmobId = "2bf438a29c5411c813e6e50a1aedfd0c";

            JYCaoZuo.getCaoZuo().page = this;
            JYCaoZuo.getCaoZuo().init(bmobId);


            // Sample code for localizing the ApplicationBar
            //BuildLocalizedApplicationBar();
            string daiMa = "int i = 0;while(i<100){ i = i+1;write(i);}";

            //Usage
            //1. Lexical analysis
            LexicalAnalysis la = new LexicalAnalysis();

            string        outStr  = la.Analyze(daiMa);
            List <object> errList = la.errlist;

            Debug.WriteLine(outStr);


            Analysis ciFaFenXi = new Analysis();

            //Check whether the syntax analysis reports any errors
            if (ciFaFenXi.syntaxAalysis(la))
            {
            }
            else
            {
                Debug.WriteLine(ciFaFenXi.errInfo.ToString());
            }
            //2. Run the algorithm
            MidCode m0 = new MidCode(ciFaFenXi);

            m0.Scan();
            Interpret runner = new Interpret();

            runner.GetRun(m0.c);
            //Get the result
            string jieGuo = runner.jieGuo;

            Debug.WriteLine(jieGuo);

            m0.clear();
        }
Example #16
        static void Main(string[] args)
        {
            string          text     = System.IO.File.ReadAllText("python.txt");
            LexicalAnalysis analyzer = new LexicalAnalysis();

            analyzer.lexeme(text);

            while (text != null)
            {
                text = text.Trim(' ', '\t');
                string token = analyzer.GetNextLexicalAtom(ref text);
                Console.Write(token);
            }
            System.Console.Read();
        }
Example #17
        private void Button_Click(object sender, RoutedEventArgs e)
        {
            //Execution result
            //Usage
            //1. Lexical analysis

            string daiMa = txtForDaiMa.Text;

            JYCaoZuo.getCaoZuo().addaoZuo("点击了执行代码:" + daiMa);
            LexicalAnalysis la = new LexicalAnalysis();

            string        outStr  = la.Analyze(daiMa);
            List <object> errList = la.errlist;

            Debug.WriteLine(outStr);


            Analysis ciFaFenXi = new Analysis();

            //Check whether the syntax analysis reports any errors
            if (ciFaFenXi.syntaxAalysis(la))
            {
            }
            else
            {
                txtForJieGuo.Text = ciFaFenXi.errInfo.ToString() + "\n" + outStr;
                Debug.WriteLine(ciFaFenXi.errInfo.ToString());
                return;
            }
            //2. Run the algorithm
            MidCode m0 = new MidCode(ciFaFenXi);

            m0.Scan();
            Interpret runner = new Interpret();

            runner.GetRun(m0.c);
            //Get the result
            string jieGuo = runner.jieGuo;

            Debug.WriteLine(jieGuo);
            txtForJieGuo.Text = jieGuo + "\n" + outStr;

            m0.clear();
            JYCaoZuo.getCaoZuo().addaoZuo("点击了执行代码:" + daiMa + ",结果:" + txtForJieGuo.Text);
        }
Example #18
        private bool Build()
        {
            try
            {
                LexicalAnalysis scanner;
                ClearAnalysersRichTextBoxes();

                string fileName = currentFileName ?? defaultFileName;

                var writeToFile = new StreamWriter(fileName, false, Encoding.Unicode);
                writeToFile.Write(textEditor.Text);
                writeToFile.Close();

                using (TextReader input = File.OpenText(fileName))
                {
                    scanner = new LexicalAnalysis(input);
                }

                FillLexicalAnalysisTab(scanner.Tokens);

                var parser = new SyntaxAnalysis(scanner.Tokens);
                FillSyntaxAnalysisTab(parser.Result);

                var generator = new Generator(parser.Result, Path.GetFileNameWithoutExtension(fileName) + ".exe");

                TextRange tr = new TextRange(outputRichTextBlock.Document.ContentStart, outputRichTextBlock.Document.ContentStart);
                tr.Text = DateTime.Now.ToString("dd/MM/yy HH:mm:ss.fff") + " | >>> " +
                          Path.GetFileNameWithoutExtension(fileName) + ".exe successfully built. \r\n";
                tr.ApplyPropertyValue(TextElement.ForegroundProperty, Brushes.Black);
            }
            catch (Exception ex)
            {
                TextRange tr = new TextRange(outputRichTextBlock.Document.ContentStart, outputRichTextBlock.Document.ContentStart);
                tr.Text = DateTime.Now.ToString("dd/MM/yy HH:mm:ss.fff") + " | " + ex.GetType().Name + " >>> Error: " +
                          ex.Message + "\r\n";

                tr.ApplyPropertyValue(TextElement.ForegroundProperty, Brushes.Red);

                ClearAndFillDefaultValuesAnalysersRichTextBoxes();
                return(false);
            }

            return(true);
        }
Example #19
        public void TestFiniteStateAutomatons()
        {
            LexicalAnalysis lexicalAnalysis = new LexicalAnalysis();

            LoadFiniteStateAutomatons(ref lexicalAnalysis);
            Directory.GetFiles(Path.Combine(Environment.CurrentDirectory, "..", "..", "TestingData", "Source")).ToList().ForEach((f) =>
            {
                string source, resultLexicalAnalysis, result;
                using (FileStream stream = new FileStream(f, FileMode.Open))
                {
                    using (StreamReader reader = new StreamReader(stream))
                    {
                        source = reader.ReadToEnd();
                    }
                }
                resultLexicalAnalysis = lexicalAnalysis.Lexer(source);

                string resultPath = Path.Combine(f, "..", "..", "Result", f.Substring(f.LastIndexOf("\\") + 1));
                if (File.Exists(resultPath))
                {
                    using (FileStream stream = new FileStream(resultPath, FileMode.Open))
                    {
                        using (StreamReader reader = new StreamReader(stream))
                        {
                            result = reader.ReadToEnd();
                        }
                    }
                    Assert.AreEqual(result, resultLexicalAnalysis);
                }
                else
                {
                    using (FileStream stream = new FileStream(resultPath, FileMode.Create))
                    {
                        using (StreamWriter writer = new StreamWriter(stream))
                        {
                            writer.Write(resultLexicalAnalysis);
                        }
                    }
                    System.Diagnostics.Process.Start(resultPath);
                }
            });
        }
Example #20
        public void TestFiniteStateAutomatonCHAR()
        {
            LexicalAnalysis lexicalAnalysis = new LexicalAnalysis();
            string          path            = Path.Combine(Environment.CurrentDirectory, "../../../CodeAnalyzer/FiniteStateAutomatons/CHAR.txt");

            lexicalAnalysis.LoadFiniteStateAutomatons(new string[] { path });
            Assert.AreEqual("<\"CHAR\",\'5\'>", lexicalAnalysis.Lexer("\'5\'", true));
            Assert.AreEqual("<\"CHAR\",\'а\'>", lexicalAnalysis.Lexer("\'а\'", true));
            Assert.AreEqual("<\"CHAR\",\'\\\\\'>", lexicalAnalysis.Lexer("\'\\\\\'", true));
            Assert.AreEqual("<\"CHAR\",\'\\u0000\'>", lexicalAnalysis.Lexer("\'\\u0000\'", true));
            Assert.AreEqual("<\"CHAR\",\'\\uFFFF\'>", lexicalAnalysis.Lexer("\'\\uFFFF\'", true));
            Assert.AreEqual("<\"CHAR\",\'\\u1234\'>", lexicalAnalysis.Lexer("\'\\u1234\'", true));
            Assert.AreEqual("<\"CHAR\",\'\\xabcd\'>", lexicalAnalysis.Lexer("\'\\xabcd\'", true));
            Assert.AreEqual("<\"CHAR\",\'\\U12345678\'>", lexicalAnalysis.Lexer("\'\\U12345678\'", true));

            Assert.AreEqual("error", lexicalAnalysis.Lexer("\'\'", true));
            Assert.AreEqual("error", lexicalAnalysis.Lexer("\'123\'", true));
            Assert.AreEqual("error", lexicalAnalysis.Lexer("\'\\xABCH\'", true));
            Assert.AreEqual("error", lexicalAnalysis.Lexer("\'\\U1\'", true));
        }
Example #21
        public void C_AnalysisTest()
        {
            var expression    = "b = 6;";
            var expectedNames = new List <string>()
            {
                "b", "=", "6", ";"
            };
            var expectedTypes = new List <TokenType>()
            {
                TokenType.Name,
                TokenType.Operator,
                TokenType.NumericalConstant,
                TokenType.Punctuation
            };

            var analysis = new LexicalAnalysis(expression, new Glossary());

            for (var i = 0; i < analysis.Result.Count; i++)
            {
                Assert.AreEqual(analysis.Result[i].Value, expectedNames[i]);
                Assert.AreEqual(analysis.Result[i].Type, expectedTypes[i]);
            }
        }
Example #22
        private GPCSymbolItem buildSymbolItem(GPCSymbolItem rCurSymbol, int begin, ref int end)
        {
            if (originData[begin] == '{')
            {
                end = begin + 1;
                return(new GPCSymbolItem()
                {
                    Type = GPCSymbolType.ObjStart, Value = "{"
                });
            }
            else if (originData[begin] == '}')
            {
                end = begin + 1;
                return(new GPCSymbolItem()
                {
                    Type = GPCSymbolType.ObjEnd, Value = "}"
                });
            }
            else if (originData[begin] == '(')
            {
                end = begin + 1;
                return(new GPCSymbolItem()
                {
                    Type = GPCSymbolType.ArgsStart, Value = "("
                });
            }
            else if (originData[begin] == ')')
            {
                end = begin + 1;
                return(new GPCSymbolItem()
                {
                    Type = GPCSymbolType.ArgsEnd, Value = ")"
                });
            }
            else if (originData[begin] == ',')
            {
                end = begin + 1;
                return(new GPCSymbolItem()
                {
                    Type = GPCSymbolType.ArgsSplit, Value = ","
                });
            }
            else if (originData[begin] == ';')
            {
                end = begin + 1;
                return(new GPCSymbolItem()
                {
                    Type = GPCSymbolType.ElementSplit, Value = ";"
                });
            }

            string tempWord = "";

            if (!string.IsNullOrEmpty(tempWord = LexicalAnalysis.IsNotKeywordIdentifer(originData, begin, ref end)))
            {
                return(new GPCSymbolItem()
                {
                    Type = GPCSymbolType.Identifer, Value = tempWord
                });
            }
            if (!string.IsNullOrEmpty(tempWord = LexicalAnalysis.isString(originData, begin, ref end)))
            {
                tempWord = tempWord.Substring(1, tempWord.Length - 2);
                return(new GPCSymbolItem()
                {
                    Type = GPCSymbolType.Arg, Value = tempWord
                });
            }
            if (!string.IsNullOrEmpty(tempWord = LexicalAnalysis.isKeyword(originData, begin, ref end)))
            {
                return(new GPCSymbolItem()
                {
                    Type = GPCSymbolType.Arg, Value = tempWord
                });
            }
            if (!string.IsNullOrEmpty(tempWord = LexicalAnalysis.isDigit(originData, begin, ref end)))
            {
                return(new GPCSymbolItem()
                {
                    Type = GPCSymbolType.Arg, Value = tempWord
                });
            }
            return(null);
        }
Example #23
        public List <GPCSymbolObject> Parser()
        {
            int              end           = 0;
            int              i             = 0;
            GPCSymbolItem    rCurSymbol    = null;
            GPCSymbolElement rCurElem      = null;
            GPCSymbolObject  rCurSymbolObj = null;

            List <GPCSymbolObject> rSymbolObjs = new List <GPCSymbolObject>();
            Stack <GPCSymbolItem>  rNodeStack  = new Stack <GPCSymbolItem>();

            while (i < this.originData.Length)
            {
                if (!string.IsNullOrEmpty(LexicalAnalysis.isSpecialSymbol(originData, i, ref end)) || !string.IsNullOrEmpty(LexicalAnalysis.isComment(originData, i, ref end)))
                {
                    i = end;
                    continue;
                }
                rCurSymbol = buildSymbolItem(rCurSymbol, i, ref end);
                if (rCurSymbol != null)
                {
                    switch (rCurSymbol.Type)
                    {
                    case GPCSymbolType.ObjStart:
                        rCurSymbolObj        = new GPCSymbolObject();
                        rCurSymbolObj.Head   = new GPCSymbolElement(rCurElem.Identifer, rCurElem.Args);
                        rCurSymbolObj.Bodies = new List <GPCSymbolElement>();
                        break;

                    case GPCSymbolType.ObjEnd:
                        rSymbolObjs.Add(rCurSymbolObj);
                        rCurSymbolObj = null;
                        break;

                    case GPCSymbolType.ArgsStart:
                        GPCSymbolItem rPeekNode = rNodeStack.Pop();
                        rCurElem           = new GPCSymbolElement();
                        rCurElem.Identifer = rPeekNode;
                        rCurElem.Args      = new List <GPCSymbolItem>();
                        break;

                    case GPCSymbolType.ArgsEnd:
                        GPCSymbolItem rPeekNode1 = rNodeStack.Pop();
                        rCurElem.Args.Add(rPeekNode1);
                        break;

                    case GPCSymbolType.ArgsSplit:
                        GPCSymbolItem rPeekNode2 = rNodeStack.Pop();
                        rCurElem.Args.Add(rPeekNode2);
                        break;

                    case GPCSymbolType.ElementSplit:
                        rCurSymbolObj.Bodies.Add(rCurElem);
                        rCurElem = null;
                        break;

                    case GPCSymbolType.Identifer:
                        rNodeStack.Push(rCurSymbol);
                        break;

                    case GPCSymbolType.Arg:
                        rNodeStack.Push(rCurSymbol);
                        break;

                    default:
                        break;
                    }
                    i = end;
                    continue;
                }
                i++;
            }
            return(rSymbolObjs);
        }
Example #24
        private JsonSymbolItem buildSymbolItem(JsonSymbolItem rLastSymbol, int begin, ref int end)
        {
            if (originData[begin] == '{')
            {
                end = begin + 1;
                return(new JsonSymbolItem()
                {
                    value = "{", type = JsonSymbolType.ObjStart
                });
            }
            else if (originData[begin] == '}')
            {
                end = begin + 1;
                return(new JsonSymbolItem()
                {
                    value = "}", type = JsonSymbolType.ObjEnd
                });
            }
            else if (originData[begin] == '[')
            {
                end = begin + 1;
                return(new JsonSymbolItem()
                {
                    value = "[", type = JsonSymbolType.ArrayStart
                });
            }
            else if (originData[begin] == ']')
            {
                end = begin + 1;
                return(new JsonSymbolItem()
                {
                    value = "]", type = JsonSymbolType.ArrayEnd
                });
            }
            else if (originData[begin] == ',')
            {
                end = begin + 1;
                return(new JsonSymbolItem()
                {
                    value = ",", type = JsonSymbolType.ObjSplit
                });
            }
            else if (originData[begin] == ':')
            {
                end = begin + 1;
                return(new JsonSymbolItem()
                {
                    value = ":", type = JsonSymbolType.ElementSplit
                });
            }

            string tempWord = "";

            //If it is a keyword, a number, or a string
            if (!string.IsNullOrEmpty(tempWord = LexicalAnalysis.isKeyword(originData, begin, ref end)))
            {
                JsonSymbolItem rSymbol = new JsonSymbolItem()
                {
                    value = tempWord, type = JsonSymbolType.Value, node = new JsonData(tempWord)
                };
                LexicalAnalysis.isSpecialSymbol(originData, end, ref end);
                if (originData[end] == ':')
                {
                    rSymbol.type = JsonSymbolType.Key;
                    rSymbol.node = null;
                }
                return(rSymbol);
            }
            if (!string.IsNullOrEmpty(tempWord = LexicalAnalysis.isDigit(originData, begin, ref end)))
            {
                JsonSymbolItem rSymbol = new JsonSymbolItem()
                {
                    value = tempWord, type = JsonSymbolType.Value, node = new JsonData(tempWord)
                };
                LexicalAnalysis.isSpecialSymbol(originData, end, ref end);
                if (originData[end] == ':')
                {
                    rSymbol.type = JsonSymbolType.Key;
                    rSymbol.node = null;
                }
                return(rSymbol);
            }
            if (!string.IsNullOrEmpty(tempWord = LexicalAnalysis.isString(originData, begin, ref end)))
            {
                tempWord = tempWord.Substring(1, tempWord.Length - 2);
                JsonSymbolItem rSymbol = new JsonSymbolItem()
                {
                    value = tempWord, type = JsonSymbolType.Value, node = new JsonData(tempWord)
                };
                LexicalAnalysis.isSpecialSymbol(originData, end, ref end);
                if (originData[end] == ':')
                {
                    rSymbol.type = JsonSymbolType.Key;
                    rSymbol.node = null;
                }
                return(rSymbol);
            }
            //Debug.Log(string.Format("Json parse symbol item error! LastSymbol = {0}",
            //               rLastSymbol != null ? rLastSymbol.value : "null"));
            isValid = false;
            return(null);
        }
Example #25
        /// <summary>
        /// Parse the JSON text
        /// </summary>
        public JsonNode Parser()
        {
            this.isValid = true;
            int end = 0;
            int i   = 0;

            JsonSymbolItem rCurSymbol  = null;
            JsonSymbolItem rLastSymbol = null;

            Stack <JsonSymbolItem> rNodeStack = new Stack <JsonSymbolItem>();

            while (i < this.originData.Length)
            {
                //Skip the useless characters (' ', '\t', '\r', '\n') and comments
                if (!string.IsNullOrEmpty(LexicalAnalysis.isSpecialSymbol(originData, i, ref end)) || !string.IsNullOrEmpty(LexicalAnalysis.isComment(originData, i, ref end)))
                {
                    i = end;
                    continue;
                }

                rCurSymbol = buildSymbolItem(rLastSymbol, i, ref end);
                if (rCurSymbol != null)
                {
                    switch (rCurSymbol.type)
                    {
                    case JsonSymbolType.Unknown:
                        Debug.LogError("Json format error.");
                        break;

                    case JsonSymbolType.ObjStart:
                        rCurSymbol.node = new JsonClass();
                        rNodeStack.Push(rCurSymbol);
                        break;

                    case JsonSymbolType.ObjEnd:
                        JsonNode rObject0 = new JsonClass();
                        while (rNodeStack.Count != 0 && rNodeStack.Peek().type != JsonSymbolType.ObjStart)
                        {
                            var rTopSymbol = rNodeStack.Pop();
                            if (rTopSymbol.type == JsonSymbolType.ObjSplit)
                            {
                                continue;
                            }
                            else if (rTopSymbol.type == JsonSymbolType.Element)
                            {
                                rObject0.AddHead(rTopSymbol.node.Key, rTopSymbol.node[rTopSymbol.node.Key]);
                            }
                        }
                        rNodeStack.Pop();
                        var rSymbol0 = new JsonSymbolItem();
                        rSymbol0.type  = JsonSymbolType.Value;
                        rSymbol0.node  = rObject0;
                        rSymbol0.value = rObject0.ToString();
                        Generate_ElementSymbol(ref rNodeStack, rSymbol0);
                        break;

                    case JsonSymbolType.ArrayStart:
                        rCurSymbol.node = new JsonArray();
                        rNodeStack.Push(rCurSymbol);
                        break;

                    case JsonSymbolType.ArrayEnd:
                        JsonNode rArray = new JsonArray();
                        while (rNodeStack.Peek().type != JsonSymbolType.ArrayStart)
                        {
                            var rTopSymbol = rNodeStack.Pop();
                            if (rTopSymbol.type == JsonSymbolType.ObjSplit)
                            {
                                continue;
                            }
                            else if (rTopSymbol.type == JsonSymbolType.Element)
                            {
                                rArray.AddHead(rTopSymbol.node);
                            }
                        }
                        rNodeStack.Pop();
                        var rSymbol = new JsonSymbolItem();
                        rSymbol.type  = JsonSymbolType.Value;
                        rSymbol.node  = rArray;
                        rSymbol.value = rArray.ToString();
                        Generate_ElementSymbol(ref rNodeStack, rSymbol);
                        break;

                    case JsonSymbolType.ObjSplit:
                        rNodeStack.Push(rCurSymbol);
                        break;

                    case JsonSymbolType.ElementSplit:
                        rNodeStack.Push(rCurSymbol);
                        break;

                    case JsonSymbolType.Key:
                        rNodeStack.Push(rCurSymbol);
                        break;

                    case JsonSymbolType.Value:
                        Generate_ElementSymbol(ref rNodeStack, rCurSymbol);
                        break;

                    default:
                        break;
                    }
                    i           = end;
                    rLastSymbol = rCurSymbol;
                    continue;
                }
                i++;
            }
            return(rNodeStack.Peek().node);
        }
Example #26
 public void Start()
 {
     Parser          = new Parser();
     LexicalAnalysis = new LexicalAnalysis();
 }
Example #27
 public void Stop()
 {
     Parser          = null;
     LexicalAnalysis = null;
 }
Example #28
        private void LoadFiniteStateAutomatons(ref LexicalAnalysis lexicalAnalysis)
        {
            string path = Path.Combine(Environment.CurrentDirectory, "../../../CodeAnalyzer/FiniteStateAutomatons");

            lexicalAnalysis.LoadFiniteStateAutomatons(Directory.GetFiles(path));
        }