コード例 #1
0
ファイル: ParserForm.cs プロジェクト: mind0n/hive
        /// <summary>
        /// Click handler: tokenizes the script text and binds the result to the grid.
        /// </summary>
        private void bParse_Click(object sender, EventArgs e)
        {
            Tokens tokens = new Tokens();

            // Feed the script to the tokenizer one character at a time.
            foreach (char ch in txtGrid.ScriptBox.Text)
            {
                tokens.Add(ch);
            }

            tokens.Flush();
            txtGrid.GridBox.DataSource = tokens.Content;
            tokens.PrintCache();
        }
コード例 #2
0
        /// <summary>
        /// Initialises a new instance of the CustomAttributeSignature class from the
        /// raw signature bytes.
        /// </summary>
        /// <param name="signiture">The byte contents of the signiture.</param>
        /// <exception cref="InvalidOperationException">
        /// Thrown when a value for the prolog differs from 0x0001. This indicates
        /// the incorrect signiture type is being read or the signiture contents
        /// are invalid.
        /// </exception>
        public CustomAttributeSignature(byte[] signiture) : base(Signatures.CustomAttribute)
        {
            Offset offset = 0;

            // The signature always starts with exactly one prolog token (0x0001).
            Tokens.Add(new PrologSignatureToken(signiture, offset));

            // TODO: Incomplete
            //  Fixed arguments
            //  Num named arguments
            //  Named arguments
        }
コード例 #3
0
        /// <summary>
        /// Initialises a new TypeSignatureToken from the <paramref name="signiture"/> starting at the
        /// specified <paramref name="offset"/>.
        /// </summary>
        /// <param name="signiture">The signiture to parse the type from.</param>
        /// <param name="offset">The offset to start reading from.</param>
        public TypeSignatureToken(byte[] signiture, Offset offset)
            : base(SignatureTokens.Type)
        {
            ElementTypeSignatureToken type = new ElementTypeSignatureToken(signiture, offset);

            Tokens.Add(type);
            ElementType = type;

            switch (type.ElementType)
            {
            // SZArray and Ptr share the same blob layout: optional custom
            // modifiers followed by the element/pointee type, so they share
            // one case body (previously duplicated verbatim).
            case ElementTypes.SZArray:
            case ElementTypes.Ptr:
                while (CustomModifierToken.IsToken(signiture, offset))
                {
                    Tokens.Add(new CustomModifierToken(signiture, offset));
                }
                Tokens.Add(new TypeSignatureToken(signiture, offset));
                break;

            case ElementTypes.GenericInstance:
                // Generic type reference, its argument count, then that many
                // type arguments in sequence.
                Tokens.Add(new ElementTypeSignatureToken(signiture, offset));
                GenericArgumentCountSignatureToken argCount = new GenericArgumentCountSignatureToken(signiture, offset);
                Tokens.Add(argCount);
                for (int i = 0; i < argCount.Count; i++)
                {
                    Tokens.Add(new TypeSignatureToken(signiture, offset));
                }
                break;

            case ElementTypes.Array:
                // Element type followed by the array's shape descriptor.
                Tokens.Add(new TypeSignatureToken(signiture, offset));
                Tokens.Add(new ArrayShapeSignatureToken(signiture, offset));
                break;
            }
        }
コード例 #4
0
        /// <summary>
        /// Scans <paramref name="source"/>, keeps every non-error token, and
        /// collects the scanner's error messages.
        /// </summary>
        public Parser(string source)
        {
            var scanner = new Scanner(source);

            foreach (var token in scanner.Scan())
            {
                if (token.Kind == TokenKind.ERROR)
                {
                    continue;
                }

                Tokens.Add(token);
            }

            ErrorMessage.AddRange(scanner.ErrorMessage);
        }
コード例 #5
0
        /// <summary>
        /// Splits <paramref name="cypherStatement"/> on the tokenizer pattern and
        /// stores each match keyed by its ordinal position ("#0#", "#1#", ...).
        /// </summary>
        public virtual CypherStatementModifier Tokenize(string cypherStatement)
        {
            BaseStatement = cypherStatement ?? throw new ArgumentNullException(nameof(cypherStatement));
            Tokens.Clear();

            var matches = Regex.Matches(BaseStatement, TokenizerRegexp, RegexOptions.IgnoreCase);
            int index   = 0;

            foreach (Match match in matches)
            {
                Tokens.Add($"#{index}#", match);
                index++;
            }

            return this;
        }
コード例 #6
0
        /// <summary>
        /// Adds <paramref name="token"/>, or overwrites the value of an existing
        /// token that matches its provider and name.
        /// </summary>
        public void AddToken(UserToken <string> token)
        {
            var match = Tokens.SingleOrDefault(
                t => t.LoginProvider == token.LoginProvider && t.TokenName == token.TokenName);

            if (match != null)
            {
                match.TokenValue = token.TokenValue;
            }
            else
            {
                Tokens.Add(token);
            }
        }
コード例 #7
0
ファイル: Lexer_Nss.cs プロジェクト: Liareth/nss2csharp
        /// <summary>
        /// Attempts to lex a preprocessor directive ('#' through end-of-line or end of data)
        /// starting at <paramref name="chBaseIndex"/>.
        /// </summary>
        /// <param name="chBaseIndex">Index in <paramref name="data"/> where the candidate directive starts.</param>
        /// <param name="data">The full source text being lexed.</param>
        /// <returns>The index just past the directive, or <paramref name="chBaseIndex"/> unchanged when no '#' is present.</returns>
        private int Preprocessor(int chBaseIndex, string data)
        {
            char ch = data[chBaseIndex];

            if (ch == '#')
            {
                // Just scan for a new line or eof, then add this in.
                int chScanningIndex = chBaseIndex;

                while (++chScanningIndex <= data.Length)
                {
                    // NOTE(review): eof becomes true one position before the true end,
                    // so a directive without a trailing newline is still captured.
                    bool eof = chScanningIndex >= data.Length - 1;

                    bool proceed = eof;
                    if (!proceed)
                    {
                        // Stop at the first newline separator after the '#'.
                        char chScanning = data[chScanningIndex];
                        proceed = NssSeparator.Map.ContainsKey(chScanning) &&
                                  NssSeparator.Map[chScanning] == NssSeparators.NewLine;
                    }

                    if (proceed)
                    {
                        NssPreprocessor preprocessor = new NssPreprocessor();
                        preprocessor.m_PreprocessorType = NssPreprocessorType.Unknown;

                        // At eof the directive runs to the very end of the data;
                        // otherwise it stops just before the newline separator.
                        int chStartIndex = chBaseIndex;
                        int chEndIndex   = eof ? data.Length : chScanningIndex;

                        if (chStartIndex == chEndIndex)
                        {
                            preprocessor.m_Data = "";
                        }
                        else
                        {
                            preprocessor.m_Data = data.Substring(chStartIndex, chEndIndex - chStartIndex);
                        }

                        int chNewBaseIndex = chEndIndex;
                        AttachDebugData(preprocessor, DebugRanges, chBaseIndex, chNewBaseIndex - 1);

                        Tokens.Add(preprocessor);
                        chBaseIndex = chNewBaseIndex;
                        break;
                    }
                }
            }

            return(chBaseIndex);
        }
コード例 #8
0
        /// <summary>
        /// Parses the token stream into an expression tree, appending a terminating
        /// ExprEnd token first when one is missing.
        /// </summary>
        public Node Parse()
        {
            if (!Tokens.Any())
            {
                return null;
            }

            var lastToken = Tokens.Last();
            if (lastToken.TokenType != FilterTokenType.ExprEnd)
            {
                Tokens.Add(new Token(FilterTokenType.ExprEnd, null, null));
            }

            return ParseExpression();
        }
コード例 #9
0
ファイル: Lexer_Nss.cs プロジェクト: Liareth/nss2csharp
        /// <summary>
        /// Attempts to lex a keyword at <paramref name="chBaseIndex"/> and, on success,
        /// appends it to Tokens and advances past it.
        /// </summary>
        /// <param name="chBaseIndex">Index in <paramref name="data"/> to start matching at.</param>
        /// <param name="data">The source text being lexed.</param>
        /// <returns>The new base index, unchanged when no keyword was matched.</returns>
        private int Keyword(int chBaseIndex, string data)
        {
            // A keyword can only start the file or follow a separator/operator token.
            if (Tokens.Count == 0 ||
                Tokens.Last().GetType() == typeof(NssSeparator) ||
                Tokens.Last().GetType() == typeof(NssOperator))
            {
                foreach (KeyValuePair <string, NssKeywords> kvp in NssKeyword.Map)
                {
                    if (chBaseIndex + kvp.Key.Length >= data.Length)
                    {
                        continue; // This would overrun us.
                    }

                    string strFromData = data.Substring(chBaseIndex, kvp.Key.Length);
                    if (strFromData == kvp.Key)
                    {
                        // We've matched a keyword, e.g. 'int ', but we might have, e.g. 'int integral',
                        // where 'integral' is an identifier. So only accept the keyword if the character
                        // following it is a separator or an operator.
                        int  chNextAlongIndex = chBaseIndex + kvp.Key.Length;
                        char chNextAlong      = data[chNextAlongIndex];
                        bool accept           = NssSeparator.Map.ContainsKey(chNextAlong) ||
                                                NssOperator.Map.ContainsKey(chNextAlong);

                        if (accept)
                        {
                            NssKeyword keyword = new NssKeyword();
                            keyword.m_Keyword = kvp.Value;

                            int chNewBaseIndex = chNextAlongIndex;
                            AttachDebugData(keyword, DebugRanges, chBaseIndex, chNewBaseIndex - 1);

                            Tokens.Add(keyword);
                            chBaseIndex = chNewBaseIndex;
                            break;
                        }
                    }
                }
            }

            return chBaseIndex;
        }
コード例 #10
0
            public void CanTranslateExpressionWithSingleOperand()
            {
                // Prepare: a single operand with no operators.
                var operand = new FakeEvaluableExpression();
                Tokens.Add(operand);

                // Act
                var result = Parser.TransformInfixToPostfix(Tokens).ToList();

                // Verify: the lone operand passes through unchanged.
                Assert.IsNotNull(result);
                Assert.IsTrue(result.Count() == 1);
                Assert.IsTrue(result.First() == operand);
            }
コード例 #11
0
 /// <summary>
 /// Adds the current input as a new token unless an identical token
 /// (same content and type) already exists in the model.
 /// </summary>
 private void AddToken()
 {
     // Guard against duplicates; the original empty else branch was dead code.
     bool alreadyExists = Model.Tokens.Any(x => x.Content == Input && x.Type == SelectedType);
     if (alreadyExists)
     {
         return;
     }

     Tokens.Add(new LanguageTokenViewModel(new Token()
     {
         Content  = Input,
         Language = Language,
         Type     = SelectedType
     }));
 }
コード例 #12
0
        /// <summary>
        /// Registers a route token, trimming segment-separator characters from the
        /// value and lazily creating the token collection on first use.
        /// </summary>
        void IPortalRequest.AddToken(string name, string value)
        {
            Argument.Assert.IsNotNullOrEmpty(name, nameof(name));

            if (!Compare.IsNullOrEmpty(value))
            {
                value = value.Trim(segmentSeperatorCharArray);
            }

            if (Tokens is null)
            {
                Tokens = new RouteTokens();
            }

            Tokens.Add(name, value);
        }
コード例 #13
0
ファイル: Tokenizer.cs プロジェクト: lagdotcom/dosjun-editor
        /// <summary>
        /// Flushes the pending token text (or <paramref name="replace"/> when supplied)
        /// into the token list under type <paramref name="tt"/>, then resets the buffer.
        /// Whitespace-only text is ignored.
        /// </summary>
        protected void AddToken(TokenType tt, string replace = "")
        {
            if (!string.IsNullOrWhiteSpace(replace))
            {
                currentToken = replace;
            }

            if (string.IsNullOrWhiteSpace(currentToken))
            {
                return;
            }

            Tokens.Add(new Token { Type = tt, Value = currentToken });
            currentToken = string.Empty;
        }
コード例 #14
0
 /// <summary>
 /// Seeds the task with the default marker tokens to search for.
 /// </summary>
 public ToDoTask()
 {
     foreach (string marker in new[] { "TODO", "HACK", "FIXME" })
     {
         Tokens.Add(new Token()
         {
             Value = marker
         });
     }
 }
コード例 #15
0
ファイル: DemoService.cs プロジェクト: galdo06/Apicem
        /// <summary>
        /// Issues a new demo token for <paramref name="name"/>, registers it in the
        /// token store, and wraps it in a successful OAuth response.
        /// </summary>
        private OAuthResponse CreateAccessToken(string name)
        {
            var issued = new DemoToken(name);
            Tokens.Add(issued);

            var response = new OAuthResponse
            {
                AccessToken = issued.AccessToken,
                Expires = issued.ExpireSeconds,
                RefreshToken = issued.RefreshToken,
                RequireSsl = false,
                Success = true
            };

            return response;
        }
コード例 #16
0
ファイル: TokenSequence.cs プロジェクト: mosamy/vulcan
        /// <summary>
        /// Splits Input on the delimiter pattern and records each piece as a token,
        /// counting unresolved template-argument prefixes along the way.
        /// </summary>
        private void Tokenize()
        {
            foreach (string piece in Regex.Split(Input, Constants.DelimiterRegex))
            {
                var token = new Token(piece);

                if (token.TokenType == TokenType.ArgumentPrefix)
                {
                    ++_unresolvedTemplateArgumentCount;
                }

                Tokens.Add(token);
            }
        }
コード例 #17
0
 /// <summary>
 /// Records the token, then transitions to the state matching what may follow:
 /// any token after a trailing operator, otherwise an operation.
 /// </summary>
 protected State PushTokenToTokenList(string token)
 {
     Tokens.Add(token);

     bool endsWithOperator = operators.Any(op => token.EndsWith(op));
     if (endsWithOperator)
     {
         return new WaitingForAnyTokenState()
         {
             Expressions = Expressions, Tokens = Tokens
         };
     }

     return new WaitingForOperationState()
     {
         Expressions = Expressions, Tokens = Tokens
     };
 }
コード例 #18
0
        /// <summary>
        /// Create a list of CSS tokens given any text source
        /// </summary>
        public TokenList Tokenize(ITextProvider textProvider, int start, int length, bool keepWhiteSpace)
        {
            Debug.Assert(start >= 0 && length >= 0 && start + length <= textProvider.Length);

            InitStream(textProvider, start, keepWhiteSpace);

            int end = start + length;

            // Each successful iteration appends one more token to the stream.
            while (CS.Position < end && AddNextTokenWrapper())
            {
            }

            // Terminate the list with an explicit end-of-file token.
            Tokens.Add(CssToken.EndOfFileToken(textProvider));

            return Tokens;
        }
コード例 #19
0
        /// <summary>
        /// Consumes a quoted string starting at the current stream position, emitting
        /// string tokens and handing embedded HTML off to OnHtml.
        /// </summary>
        /// <param name="addToken">NOTE(review): not consulted in this override — confirm intended.</param>
        /// <returns>The text range covered, from the opening quote to the stop position.</returns>
        protected override ITextRange HandleString(bool addToken = true)
        {
            int  start = _cs.Position;
            char quote = _cs.CurrentChar;

            // since the escape char is exactly the string openning char we say we start in escaped mode
            // it will get reset by the first char regardless what it is, but it will keep the '' case honest
            _cs.MoveToNextChar();

            // Scan until the closing quote, end of stream, or end of line.
            while (!_cs.IsEndOfStream() && !_cs.IsAtNewLine())
            {
                // Skip a backslash-escaped quote so it does not terminate the string.
                if (_cs.CurrentChar == '\\' && _cs.NextChar == quote)
                {
                    _cs.Advance(2);
                }

                if (_cs.CurrentChar == quote)
                {
                    _cs.MoveToNextChar();
                    break;
                }

                // '<' followed by '/' or a letter marks embedded HTML inside the string.
                if (_cs.CurrentChar == '<' && (_cs.NextChar == '/' || Char.IsLetter(_cs.NextChar)))
                {
                    // Flush the string text scanned so far before processing the HTML.
                    if (_cs.Position > start)
                    {
                        Tokens.Add(GetStringToken(start, _cs.Position - start));
                    }

                    OnHtml();

                    start = _cs.Position;
                }
                else
                {
                    _cs.MoveToNextChar();
                }
            }

            var range = TextRange.FromBounds(start, _cs.Position);

            // Emit whatever remains after the last flush (or the whole string).
            if (range.Length > 0)
            {
                Tokens.Add(GetStringToken(start, range.Length));
            }

            return(range);
        }
コード例 #20
0
        /// <summary>
        /// Adds a store of certificates to the list of certificates to decrypt with.
        /// </summary>
        /// <param name="storeName">store name of the certificates</param>
        /// <param name="storeLocation">store location</param>
        public static void AddDecryptionCertificates(StoreName storeName, StoreLocation storeLocation)
        {
            X509Store store = new X509Store(storeName, storeLocation);
            store.Open(OpenFlags.ReadOnly);

            foreach (X509Certificate2 certificate in store.Certificates)
            {
                // Only certificates with a private key can decrypt.
                if (!certificate.HasPrivateKey)
                {
                    continue;
                }

                Tokens.Add(new X509SecurityToken(certificate));
            }

            store.Close();
        }
コード例 #21
0
        /// <summary>
        /// Adds a certificate to the list of certificates to decrypt with.
        /// </summary>
        /// <param name="storeName">store name of the certificate</param>
        /// <param name="storeLocation">store location</param>
        /// <param name="thumbprint">thumbprint of the cert to use</param>
        public static void AddDecryptionCertificate(StoreName storeName, StoreLocation storeLocation, string thumbprint)
        {
            X509Store store = new X509Store(storeName, storeLocation);
            store.Open(OpenFlags.ReadOnly);

            // Only valid certificates matching the thumbprint are considered.
            var matches = store.Certificates.Find(X509FindType.FindByThumbprint, thumbprint, true);

            foreach (X509Certificate2 certificate in matches)
            {
                // Decryption requires the private half of the key pair.
                if (certificate.HasPrivateKey)
                {
                    Tokens.Add(new X509SecurityToken(certificate));
                }
            }

            store.Close();
        }
コード例 #22
0
        /// <summary>
        /// Reads the token displayed on the page, records it, then navigates back
        /// via the "p &gt; a" link using a script-assisted click.
        /// </summary>
        private string ScrapeTokenAndClickBack()
        {
            string token = Driver.FindElements(By.TagName("b"))[0].Text;
            Tokens.Add(token);
            Console.WriteLine("got token: " + token);

            IWebElement backLink = Driver.FindElement(By.CssSelector("p > a"));

            // Scroll the link into view first so the click lands on it.
            var js = (IJavaScriptExecutor)Driver;
            js.ExecuteScript("document.querySelector('p > a').scrollIntoView(true)");

            new Actions(Driver).Click(backLink).Perform();

            return token;
        }
コード例 #23
0
ファイル: TuneLexer.cs プロジェクト: ModernMAK/ABCSharp
        /// <summary>
        /// Lexes a tune-body info-field header (or header continuation) line into a
        /// single BodyFieldToken covering the whole line.
        /// </summary>
        public bool TryLexHeader(string line, int lineIndex)
        {
            // A header line must be longer than two chars and match either the
            // header or header-continuation form.
            bool looksLikeHeader = line.Length > 2 &&
                                   (InfoFields.IsHeader(line, InfoFields.TuneBodyInfoKeys) ||
                                    InfoFields.IsHeaderContinuation(line));

            if (!looksLikeHeader)
            {
                return false;
            }

            Tokens.Add(new BodyFieldToken()
            {
                Line = lineIndex, Char = 0, Text = line
            });
            return true;
        }
コード例 #24
0
            public void CanTranslateExpressionWithUnaryOperator()
            {
                // Prepare: infix "NOT x".
                var operand  = new FakeEvaluableExpression();
                var negation = new NotExpression();
                Tokens.Add(negation);
                Tokens.Add(operand);

                // Act
                var result = Parser.TransformInfixToPostfix(Tokens).ToList();

                // Verify: postfix order is "x NOT".
                Assert.AreSame(operand, result[0]);
                Assert.AreSame(negation, result[1]);
            }
コード例 #25
0
        /// <summary>
        /// Tokenizes <paramref name="text"/> from start to finish, then verifies
        /// that parentheses and curly braces are balanced.
        /// </summary>
        public void Tokenize(string text)
        {
            _text = text;
            _parenthesesCounter = 0;
            _curlyBracesCounter = 0;

            int position = 0;

            // Pull tokens until the scanner reports the end of input.
            for (IAToken token = GetNextToken(ref position); token != null; token = GetNextToken(ref position))
            {
                Tokens.Add(token);
            }

            CheckThatParenthesesMatch();
        }
コード例 #26
0
ファイル: CoreNLPClient.cs プロジェクト: GregWickham/Echo
        /// <summary>Return a new <see cref="ParseResult"/> constructed from <paramref name="annotation"/></summary>
        internal ParseResult(Annotation annotation)
        {
            java.util.AbstractList sentences = annotation.get(SentencesAnnotationClass) as java.util.AbstractList;
            CoreMap sentence = sentences.get(0) as CoreMap;
            LabeledScoredTreeNode constituencyParse = sentence.get(TreeAnnotationClass) as LabeledScoredTreeNode;

            // Skip the ROOT node of the constituency parse.
            Tree childOfRoot = constituencyParse.firstChild();

            Constituents = childOfRoot;
            Constituents.indexLeaves();

            // Build the collection of tokens.
            // (Unused mention/gender annotation lookups removed — they were never read.)
            var parsedTokens = sentence.get(TokensAnnotationClass) as java.util.AbstractList;

            for (int tokenIndex = 0; tokenIndex < parsedTokens.size(); tokenIndex++)
            {
                CoreLabel source = parsedTokens.get(tokenIndex) as CoreLabel;
                Tokens.Add(new ParseToken
                {
                    Index            = source.index(),
                    Word             = source.word(),
                    Lemma            = source.lemma(),
                    PartOfSpeech     = source.get(PartOfSpeechAnnotationClass) as string,
                    NamedEntityClass = source.get(NamedEntityTagAnnotationClass) as string,
                });
            }

            // Create the list of dependencies between tokens.
            SemanticGraph dependencyGraph = sentence.get(DependencyAnnotationClass) as SemanticGraph;
            java.util.Iterator dependencyGraphEdges = dependencyGraph.edgeIterable().iterator();

            while (dependencyGraphEdges.hasNext())
            {
                SemanticGraphEdge edge = dependencyGraphEdges.next() as SemanticGraphEdge;

                string      relationName      = edge.getRelation().getShortName();
                string      relationSpecifier = edge.getRelation().getSpecific();
                IndexedWord governor          = edge.getGovernor();
                IndexedWord dependent         = edge.getDependent();

                Dependencies.Add((relationName, relationSpecifier, governor.index(), dependent.index()));
            }
        }
コード例 #27
0
        /// <summary>
        /// Scans the whole source string and returns its tokens, terminated by a
        /// synthetic EOF token carrying the final line number.
        /// </summary>
        public List <Token> ScanTokens(string Source)
        {
            this.Source  = Source;
            this.Tokens  = new List <Token>();
            this.Line    = 1;
            this.Start   = 0;
            this.Current = 0;

            while (Current < Source.Length)
            {
                // Each pass consumes exactly one token starting at Current.
                Start = Current;
                ScanToken();
            }

            Tokens.Add(new Token(TokenType.EOF, null, null, Line));
            return Tokens;
        }
コード例 #28
0
        /// <summary>
        /// Stores a token value, overwriting an existing token for the same
        /// provider/name pair or creating a new one.
        /// </summary>
        public void SetToken(string provider, string name, string value)
        {
            var token = GetToken(provider, name);

            if (token == null)
            {
                Tokens.Add(new AppUserToken
                {
                    LoginProvider = provider,
                    Name          = name,
                    Value         = value
                });
            }
            else
            {
                token.Value = value;
            }
        }
コード例 #29
0
ファイル: TuneLexer.cs プロジェクト: ModernMAK/ABCSharp
        /// <summary>
        /// Lexes a line-continuation: a backslash that is the final character of the line.
        /// </summary>
        private bool TryLexContinuation(string line, int lineIndex, ref int charIndex)
        {
            bool isTrailingBackslash = charIndex == line.Length - 1 && line[charIndex] == '\\';
            if (!isTrailingBackslash)
            {
                return false;
            }

            var token = new ContinuationToken()
            {
                Line = lineIndex,
                Char = charIndex,
                Text = "\\"
            };

            Tokens.Add(token);
            charIndex += token.Text.Length;
            return true;
        }
コード例 #30
0
ファイル: TuneLexer.cs プロジェクト: ModernMAK/ABCSharp
        /// <summary>
        /// Lexes a slur mark: an opening or closing parenthesis at the start of the part.
        /// </summary>
        private bool TryLexSlur(string part, int lineIndex, ref int charIndex)
        {
            char first = part[0];
            if (first != '(' && first != ')')
            {
                return false;
            }

            var token = new SlurToken()
            {
                Line = lineIndex,
                Char = charIndex,
                Text = first.ToString(),
            };

            Tokens.Add(token);
            charIndex += token.Text.Length;
            return true;
        }
コード例 #31
0
        /// <summary>
        /// Creates or updates the token identified by <paramref name="loginProvider"/>
        /// and <paramref name="name"/> with the supplied value.
        /// </summary>
        public virtual void SetToken(string loginProvider, string name, string value)
        {
            var current = GetToken(loginProvider, name);

            if (current != null)
            {
                current.Value = value;
            }
            else
            {
                Tokens.Add(new IdentityUserToken
                {
                    LoginProvider = loginProvider,
                    Name          = name,
                    Value         = value
                });
            }
        }