/// <summary>
/// Creates a parser result carrying the token that was parsed, the produced
/// expression, a success flag, and an optional syntax error.
/// </summary>
/// <param name="token">Token this result describes.</param>
/// <param name="isSuccessed">Whether parsing succeeded.</param>
/// <param name="expr">Expression produced by the parse.</param>
/// <param name="error">Optional syntax error (null on success).</param>
public ParserResult(TokenInfo token, bool isSuccessed, LambdaExpression expr, SyntaxException error = null)
{
    Token = token;
    Error = error;
    Expression = expr;
    IsSuccessed = isSuccessed;
}
/// <summary>
/// Builds a WPF <c>Run</c> for the token's source text, styled according to the
/// token's category (falling back to the <c>TokenCategory.None</c> style).
/// </summary>
/// <param name="code">Full source text the token's span indexes into.</param>
/// <param name="token">Token whose text and category drive the run.</param>
/// <returns>A styled <c>Run</c> containing the token's text.</returns>
private static Run CreateTextRun(string code, TokenInfo token)
{
    var text = code.Substring(token.SourceSpan.Start.Index, token.SourceSpan.Length);
    var result = new Run(text);
    // Single dictionary lookup (original did ContainsKey + indexer = two lookups).
    if (!_colorizationStyles.TryGetValue(token.Category, out var style))
    {
        style = _colorizationStyles[TokenCategory.None];
    }
    result.Style = Application.Current.FindResource(style) as Style;
    return result;
}
/// <summary>
/// Fetches the authenticated user's channel list (with content details) from the
/// YouTube Data API and deserializes the JSON response.
/// </summary>
/// <param name="baseUrl">API base URL, e.g. https://www.googleapis.com/youtube/v3.</param>
/// <param name="googleAuthorization">OAuth token used as the bearer credential.</param>
/// <param name="httpClient">Client used to issue the request.</param>
/// <returns>Deserialized channel information.</returns>
private ChannelInformations GetChannelInformation(string baseUrl, TokenInfo googleAuthorization, HttpClient httpClient)
{
    var requestUrl = baseUrl + "/channels?part=contentDetails&mine=true";
    var responseBody = MakeYoutubeGetRequest(googleAuthorization, httpClient, requestUrl);
    var serializer = new JavaScriptSerializer();
    return serializer.Deserialize<ChannelInformations>(responseBody);
}
/// <summary>
/// Combines this result's expression with <paramref name="other"/>'s by closing
/// the generic MapGroupInvoker&lt;,&gt; over (source, target) types and invoking it,
/// producing a new successful MapRule result.
/// </summary>
/// <param name="other">The result supplying the target type and second expression.</param>
/// <returns>A successful ParserResult wrapping the combined expression.</returns>
private ParserResult MapRuleAction(ParserResult other)
{
    // Source type comes from this token; target type from the other token.
    Type source = Token.SourceType;
    Type target = other.Token.TargetType;
    // Close the open generic over the two types and instantiate it with both expressions.
    var invoker = (IInvoker)Utilities.CreateType(typeof(MapGroupInvoker<,>), source, target)
        .CreateInstance(Expression, other.Expression);
    var expr = invoker.Invoke();
    var token = new TokenInfo(RegisterKeys.MapRule, source, target);
    return new ParserResult(token, true, expr);
}
/// <summary>Returns true when the token is a unary operator (+, -, not).</summary>
public static bool IsUnaryOperator(TokenInfo token)
{
    var kind = token.Token;
    return kind == Token.Plus
        || kind == Token.Minus
        || kind == Token.Not;
}
/// <summary>
/// Fetches playlist items for each channel item's "uploads" playlist from the
/// YouTube Data API and deserializes the JSON response.
/// </summary>
/// <param name="channelInformation">Channel data whose items carry the uploads playlist ids.</param>
/// <param name="baseUrl">API base URL.</param>
/// <param name="googleAuthorization">OAuth token used as the bearer credential.</param>
/// <param name="httpClient">Client used to issue the request.</param>
/// <returns>The deserialized playlist information.</returns>
private PlayListInformations GetPlayListInformation(ChannelInformations channelInformation, string baseUrl, TokenInfo googleAuthorization, HttpClient httpClient)
{
    PlayListInformations playlistInformation = new PlayListInformations();
    foreach (var item in channelInformation.items)
    {
        var playlistItemUrl = baseUrl + "/playlistItems?part=snippet&playlistId=" + item.contentDetails.relatedPlaylists.uploads;
        var playlistResponse = MakeYoutubeGetRequest(googleAuthorization, httpClient, playlistItemUrl);
        // NOTE(review): each iteration overwrites the previous result, so only the
        // LAST channel item's playlist is returned — presumably a single-item
        // channel list is assumed; confirm against callers.
        playlistInformation = new JavaScriptSerializer().Deserialize<PlayListInformations>(playlistResponse);
    }
    return playlistInformation;
}
/// <summary>
/// Creates an authentication token for the client.
/// Fails with an error code when the request signature is invalid or the
/// calling application is a Web application (which is not permitted).
/// </summary>
/// <returns>
/// The token (raw for JSON format, serialized TokenInfo otherwise), or an
/// empty string on failure.
/// </returns>
public string CreateToken()
{
    string returnStr = "";
    // Reject requests whose signature does not match.
    if (Signature != GetParam("sig").ToString())
    {
        ErrorCode = (int)ErrorType.API_EC_SIGNATURE;
        return returnStr;
    }
    // Web-type applications are not permitted to call this method.
    if (this.App.ApplicationType == (int)ApplicationType.WEB)
    {
        ErrorCode = (int)ErrorType.API_EC_PERMISSION_DENIED;
        return returnStr;
    }
    OnlineUserInfo oluserinfo = OnlineUsers.UpdateInfo(Config.Passwordkey, Config.Onlinetimeout);
    int olid = oluserinfo.Olid;
    string expires = string.Empty;
    DateTime expireUTCTime;
    TokenInfo token = new TokenInfo();
    // Without the "dnt" cookie's expires value no token can be issued.
    if (System.Web.HttpContext.Current.Request.Cookies["dnt"] == null || System.Web.HttpContext.Current.Request.Cookies["dnt"]["expires"] == null)
    {
        token.Token = "";
        if (Format == FormatType.JSON)
            returnStr = "";
        else
            returnStr = SerializationHelper.Serialize(token);
        return returnStr;
    }
    expires = System.Web.HttpContext.Current.Request.Cookies["dnt"]["expires"].ToString();
    ShortUserInfo userinfo = Discuz.Forum.Users.GetShortUserInfo(oluserinfo.Userid);
    // Expiry = last visit (UTC) + cookie's expires seconds, as a Unix timestamp.
    expireUTCTime = DateTime.Parse(userinfo.Lastvisit).ToUniversalTime().AddSeconds(Convert.ToDouble(expires));
    expires = Utils.ConvertToUnixTimestamp(expireUTCTime).ToString();
    string time = string.Empty;
    // NOTE(review): oluserinfo was already dereferenced above (Olid, Userid), so
    // this null check can never be true here — confirm intent.
    if (oluserinfo == null)
        time = DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss");
    else
        time = DateTime.Parse(oluserinfo.Lastupdatetime).ToString("yyyy-MM-dd HH:mm:ss");
    // DES-encrypt "olid,time,expires" with the first 10 chars of the secret;
    // '+' is replaced with '[' (presumably to keep the token URL-safe — verify).
    string authToken = Common.DES.Encode(string.Format("{0},{1},{2}", olid.ToString(), time, expires), this.Secret.Substring(0, 10)).Replace("+", "[");
    token.Token = authToken;
    if (Format == FormatType.JSON)
        returnStr = authToken;
    else
        returnStr = SerializationHelper.Serialize(token);
    return returnStr;
}
/// <summary>Returns true when the token is an arithmetic operator (+, -, /, *, %, mod).</summary>
public static bool IsArithmeticOperator(TokenInfo token)
{
    var kind = token.Token;
    return kind == Token.Plus
        || kind == Token.Minus
        || kind == Token.Slash
        || kind == Token.Star
        || kind == Token.Percent
        || kind == Token.Mod;
}
/// <summary>
/// Returns true when the token is a binary operator: arithmetic, comparison,
/// identity (is / is not), equality, or member access (period).
/// </summary>
public static bool IsBinaryOperator(TokenInfo token)
{
    var kind = token.Token;
    return kind == Token.Plus
        || kind == Token.Minus
        || kind == Token.Slash
        || kind == Token.Star
        || kind == Token.Percent
        || kind == Token.LeftAngleBracket
        || kind == Token.RightAngleBracket
        || kind == Token.Is
        || kind == Token.IsNot
        || kind == Token.Mod
        || kind == Token.Equality
        || kind == Token.Inequality
        || kind == Token.Period;
}
/// <summary>
/// Command handler that creates an authentication token for the client.
/// Web-type applications are denied; without the "dnt" cookie's expires value
/// an empty token is returned.
/// </summary>
/// <param name="commandParam">Request context (app info, config, format).</param>
/// <param name="result">Receives the token (raw for JSON, serialized otherwise) or an error message.</param>
/// <returns>false when permission is denied; true otherwise.</returns>
public override bool Run(CommandParameter commandParam, ref string result)
{
    // Web-type applications are not permitted to call this command.
    if (commandParam.AppInfo.ApplicationType == (int)ApplicationType.WEB)
    {
        result = Util.CreateErrorMessage(ErrorType.API_EC_PERMISSION_DENIED, commandParam.ParamList);
        return false;
    }
    TokenInfo token = new TokenInfo();
    // Without the "dnt" cookie's expires value, return an empty token.
    if (System.Web.HttpContext.Current.Request.Cookies["dnt"] == null || System.Web.HttpContext.Current.Request.Cookies["dnt"]["expires"] == null)
    {
        token.Token = "";
        result = commandParam.Format == FormatType.JSON ? string.Empty : SerializationHelper.Serialize(token);
        return true;
    }
    OnlineUserInfo oluserinfo = OnlineUsers.UpdateInfo(commandParam.GeneralConfig.Passwordkey, commandParam.GeneralConfig.Onlinetimeout);
    int olid = oluserinfo.Olid;
    string expires = string.Empty;
    DateTime expireUTCTime;
    expires = System.Web.HttpContext.Current.Request.Cookies["dnt"]["expires"].ToString();
    ShortUserInfo userinfo = Discuz.Forum.Users.GetShortUserInfo(oluserinfo.Userid);
    // Expiry = last visit (UTC) + cookie's expires seconds, as a Unix timestamp.
    expireUTCTime = DateTime.Parse(userinfo.Lastvisit).ToUniversalTime().AddSeconds(Convert.ToDouble(expires));
    expires = Utils.ConvertToUnixTimestamp(expireUTCTime).ToString();
    string time = string.Empty;
    // NOTE(review): oluserinfo was already dereferenced above (Olid, Userid), so
    // this null check can never be true here — confirm intent.
    if (oluserinfo == null)
        time = DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss");
    else
        time = DateTime.Parse(oluserinfo.Lastupdatetime).ToString("yyyy-MM-dd HH:mm:ss");
    // DES-encrypt "olid,time,expires" with the first 10 chars of the app secret;
    // '+' is replaced with '[' (presumably to keep the token URL-safe — verify).
    string authToken = Common.DES.Encode(string.Format("{0},{1},{2}", olid.ToString(), time, expires), commandParam.AppInfo.Secret.Substring(0, 10)).Replace("+", "[");
    token.Token = authToken;
    result = commandParam.Format == FormatType.JSON ? authToken : SerializationHelper.Serialize(token);
    return true;
}
/// <summary>
/// Retrieves the user's uploaded videos from the YouTube Data API: resolves the
/// channel, reads its uploads playlist, and projects the items into Video DTOs.
/// </summary>
/// <param name="tokenInfo">Google OAuth token used for the API calls.</param>
/// <returns>A Videos container with one Video per playlist item.</returns>
public Videos GetVideoList(TokenInfo tokenInfo)
{
    // Dispose the client when done (the original leaked it).
    using (var httpClient = new HttpClient())
    {
        var baseUrl = "https://www.googleapis.com/youtube/v3";
        var channelInformation = GetChannelInformation(baseUrl, tokenInfo, httpClient);
        var playlistInformation = GetPlayListInformation(channelInformation, baseUrl, tokenInfo, httpClient);
        var video = playlistInformation.items.Select(x => new Video
        {
            Description = x.snippet.description,
            Name = x.snippet.title,
            ThumbnailUrl = x.snippet.thumbnails.high.url,
            VideoUrl = @"http://www.youtube.com/embed/" + x.snippet.resourceId.VideoId,
            PublishedDate = x.snippet.publishedAt
        }).ToList();
        return new Videos { VideoList = video };
    }
}
/// <summary>
/// Walks backwards from <paramref name="currentLine"/> accumulating the lengths of
/// incomplete multi-line string tokens that lead into the current line, populating
/// the token cache on the way.
/// </summary>
/// <param name="validPrevLine">Receives the earliest line that is still part of the run.</param>
/// <param name="startToken">Receives the incomplete-string token that starts the run.</param>
/// <returns>Total length of the leading incomplete string tokens.</returns>
private int GetLeadingMultiLineStrings(Tokenizer tokenizer, ITextSnapshot snapshot, int firstLine, int currentLine, out int validPrevLine, ref TokenInfo startToken)
{
    validPrevLine = currentLine;
    int prevLine = currentLine - 1;
    int length = 0;
    while (prevLine >= 0)
    {
        LineTokenization prevLineTokenization;
        if (!_tokenCache.TryGetTokenization(prevLine, out prevLineTokenization))
        {
            // Cache miss: re-tokenize forward from the last cached state so the
            // tokenizer state for prevLine is correct.
            LineTokenization lineTokenizationTemp;
            int currentLineTemp = _tokenCache.IndexOfPreviousTokenization(firstLine, 0, out lineTokenizationTemp) + 1;
            object stateTemp = lineTokenizationTemp.State;
            // NOTE(review): currentLineTemp is never advanced inside this loop, so a
            // cache miss here appears to loop forever — looks like a `currentLineTemp++`
            // was lost; TODO confirm against upstream source.
            while (currentLineTemp <= snapshot.LineCount)
            {
                if (!_tokenCache.TryGetTokenization(currentLineTemp, out lineTokenizationTemp))
                {
                    lineTokenizationTemp = TokenizeLine(tokenizer, snapshot, stateTemp, currentLineTemp);
                    _tokenCache[currentLineTemp] = lineTokenizationTemp;
                }
                stateTemp = lineTokenizationTemp.State;
            }
            prevLineTokenization = TokenizeLine(tokenizer, snapshot, stateTemp, prevLine);
            _tokenCache[prevLine] = prevLineTokenization;
        }
        if (prevLineTokenization.Tokens.Length != 0)
        {
            // Stop as soon as the line does not end in an incomplete multi-line string.
            if (prevLineTokenization.Tokens[prevLineTokenization.Tokens.Length - 1].Category != TokenCategory.IncompleteMultiLineStringLiteral)
            {
                break;
            }
            startToken = prevLineTokenization.Tokens[prevLineTokenization.Tokens.Length - 1];
            length += startToken.SourceSpan.Length;
        }
        validPrevLine = prevLine;
        prevLine--;
        if (prevLineTokenization.Tokens.Length > 1)
        {
            // http://pytools.codeplex.com/workitem/749
            // if there are multiple tokens on this line then our multi-line string
            // is terminated.
            break;
        }
    }
    return length;
}
/// <summary>
/// Converts a token's (line, 1-based column, length) source span into an absolute
/// <c>Span</c> within the snapshot.
/// </summary>
private static Span SnapshotSpanToSpan(ITextSnapshot snapshot, TokenInfo token, int lineNumber)
{
    var lineStart = snapshot.GetLineFromLineNumber(lineNumber).Start.Position;
    // Column is 1-based; the snapshot position is 0-based.
    var start = lineStart + token.SourceSpan.Start.Column - 1;
    return new Span(start, token.SourceSpan.Length);
}
/// <summary>
/// Completes a HelloPaisa transaction: fetches the token info for the posted
/// access token, validates it, then submits the payment with the user's OTP.
/// The outcome is reported via ViewBag.Message.
/// </summary>
/// <param name="tokenInfo">Posted token (access token + transaction OTP).</param>
/// <returns>The view, with ViewBag.Message describing the result.</returns>
public ActionResult HPTransaction(TokenInfo tokenInfo)
{
    if (tokenInfo != null)
    {
        //STEPS:
        //1. get token info
        //2. validate token
        //3. complete payment
        //get token info
        using (var _httpClient = new HttpClient())
        {
            _httpClient.BaseAddress = new Uri(@"https://test.hellopaisa.com.np/");
            _httpClient.DefaultRequestHeaders.Accept.Clear();
            _httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
            //make the request for the token info
            // NOTE(review): .Result blocks the request thread (sync-over-async) and
            // risks deadlock; consider making this action async and awaiting.
            HttpResponseMessage _responseForTokenInfo = _httpClient.GetAsync("api/TokenInfo?token=" + tokenInfo.AccessToken).Result;
            //if the status code is success, then
            if (_responseForTokenInfo.IsSuccessStatusCode)
            {
                //retrieve the token info object
                var _tokenInfo = _responseForTokenInfo.Content.ReadAsAsync <TokenInfo>().Result;
                //now check the validation of the tokenInfo
                var _responseForTokenValidation = _httpClient.PostAsJsonAsync("api/ValidateTokenInfo", _tokenInfo);
                var _tokenValidationStatus = _responseForTokenValidation.Result.Content.ReadAsAsync <TokenStatus>().Result;
                //check if the token status is valid or not
                if (_tokenValidationStatus.IsTokenValid == true)
                {
                    //enter the transaction OTP, entered by the user
                    _tokenInfo.TransactionOTP = tokenInfo.TransactionOTP;
                    //call the complete payment function
                    var _paymentResponse = _httpClient.PostAsJsonAsync("api/CompletePayment", _tokenInfo);
                    var _transactionResponse = _paymentResponse.Result.Content.ReadAsAsync <Transaction>().Result;
                    if (_transactionResponse != null)
                    {
                        //check the status of the transaction: if its validity is true and the responseCode is 0, the transaction is successful.
                        if (_transactionResponse.Validity == true && _transactionResponse.ResponseCode == 0)
                        {
                            //success
                            var msg = " Your transaction is " + _transactionResponse.ResponseMessage + ", Transaction Trace ID=" + _transactionResponse.TransactionTraceID;
                            ViewBag.Message = msg;
                        }
                        else
                        {
                            //transaction failed
                            // NOTE(review): this string literal spans a raw line break in
                            // the source (likely a lost "\n" or <br/> during extraction);
                            // kept byte-identical here.
                            var errorMsg = "Transaction Failed. 
" + _transactionResponse.ResponseMessage + " , error Code=" + _transactionResponse.ResponseCode;
                            ViewBag.Message = errorMsg;
                        }
                    }
                }
                else
                {
                    ViewBag.Message = "Invalid TokenInfo Status";
                }
            }
            else
            {
                ViewBag.Message = "TokenInfo not found." + _responseForTokenInfo.StatusCode;
            }
        }
    }
    return(View());
}
/// <summary>
/// Kicks off a background parse to highlight the brace matching the one at the
/// given position.
/// </summary>
public virtual void MatchBraces(IVsTextView textView, int line, int idx, TokenInfo info)
{
    var handler = new ParseResultHandler(this.HandleMatchBracesResponse);
    this.BeginParse(line, idx, info, ParseReason.HighlightBraces, textView, handler);
}
/// <summary>
/// Starts a completion parse at the caret: complete-word when
/// <paramref name="completeWord"/> is set, member-select otherwise.
/// </summary>
public virtual void Completion(IVsTextView textView, TokenInfo info, bool completeWord)
{
    int line;
    int idx;
    textView.GetCaretPos(out line, out idx);
    this.completeWord = completeWord;
    ParseReason reason;
    if (completeWord)
    {
        reason = ParseReason.CompleteWord;
    }
    else
    {
        reason = ParseReason.MemberSelect;
    }
    this.BeginParse(line, idx, info, reason, textView, new ParseResultHandler(this.HandleCompletionResponse));
}
/// <summary>
/// Computes the extent (start/end columns) of the word at or near the given
/// position, honoring the WORDEXT move-type flags (previous/next/nearest/current).
/// Comment, text, string and literal tokens are never word extents.
/// </summary>
/// <param name="line">Zero-based line number.</param>
/// <param name="idx">Zero-based column of interest.</param>
/// <param name="flags">Move-type and match flags.</param>
/// <param name="startIdx">Receives the start column of the found token.</param>
/// <param name="endIdx">Receives the end column of the found token.</param>
/// <returns>true when a matching token was found.</returns>
public bool GetWordExtent(int line, int idx, WORDEXTFLAGS flags, out int startIdx, out int endIdx)
{
    Debug.Assert(line >= 0 && idx >= 0);
    startIdx = endIdx = 0;
    //get the character classes
    TokenInfo[] lineInfo = this.colorizer.GetLineInfo(line, this.colorState);
    if (lineInfo == null) return false;
    int count = lineInfo.Length;
    TokenInfo info = new TokenInfo();
    int index = this.GetTokenInfoAt(lineInfo, idx, ref info);
    if (index < 0) return false;
    //don't do anything in comment or text or literal space
    if (info.type == TokenType.Comment ||
        info.type == TokenType.LineComment ||
        info.type == TokenType.Embedded ||
        info.type == TokenType.Text ||
        info.type == TokenType.String ||
        info.type == TokenType.Literal)
        return false;
    //search for a token
    switch (flags & WORDEXTFLAGS.WORDEXT_MOVETYPE_MASK)
    {
        case WORDEXTFLAGS.WORDEXT_PREVIOUS:
            index--;
            while (index >= 0 && !MatchToken(flags, lineInfo[index]))
                index--;
            // BUGFIX: the original tested 'idx' (the caret column, never negative
            // here); the search result is 'index'.
            if (index < 0) return false;
            break;
        case WORDEXTFLAGS.WORDEXT_NEXT:
            // BUGFIX: the original incremented 'idx' instead of 'index', so the
            // search started on the current token rather than the next one.
            index++;
            while (index < count && !MatchToken(flags, lineInfo[index]))
                index++;
            if (index >= count) return false;
            break;
        case WORDEXTFLAGS.WORDEXT_NEAREST:
        {
            // Scan in both directions and pick whichever match is closer.
            int prevIdx = index;
            prevIdx--;
            while (prevIdx >= 0 && !MatchToken(flags, lineInfo[prevIdx]))
                prevIdx--;
            int nextIdx = index;
            while (nextIdx < count && !MatchToken(flags, lineInfo[nextIdx]))
                nextIdx++;
            if (prevIdx < 0 && nextIdx >= count) return false;
            else if (nextIdx >= count) index = prevIdx;
            else if (prevIdx < 0) index = nextIdx;
            else if (index - prevIdx < nextIdx - index) index = prevIdx;
            else index = nextIdx;
            break;
        }
        case WORDEXTFLAGS.WORDEXT_CURRENT:
        default:
            if (!MatchToken(flags, info)) return false;
            break;
    }
    info = lineInfo[index];
    //we found something, set the span
    startIdx = info.startIndex;
    endIdx = info.endIndex;
    // BUGFIX: guard was 'index < lineInfo.Length' (always true here), which read
    // one past the end of the array for the last token on the line.
    if (index + 1 < lineInfo.Length)
        endIdx = lineInfo[index + 1].startIndex;
    return true;
}
/// <summary>
/// Returns the cached declarations list, regardless of position or parse reason.
/// </summary>
public override Microsoft.VisualStudio.Package.Declarations GetDeclarations(IVsTextView view, int line, int col, TokenInfo info, ParseReason reason)
{
    return Declarations;
}
/// <summary>
/// Scans the next token on the current line, filling <paramref name="tokenInfo"/>
/// with its color, extent, and IntelliSense triggers. Returns false at end of
/// line (after persisting scanner state for the next line) or on scan errors.
/// </summary>
/// <param name="tokenInfo">Receives color, start/end indices, and triggers.</param>
/// <param name="prevLineNumber">Line-state index; advanced when the line ends.</param>
/// <returns>true when a token was produced; false at end of line or on error.</returns>
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int /*state*/ prevLineNumber)
{
    // On the first call for a line, restore the scanner state saved for it.
    if (!isScanningLine)
    {
        scanner.RestoreState(lineState[prevLineNumber]);
        isScanningLine = true;
    }
    Token token;
    try
    {
        token = scanner.GetToken();
        if (token == null && scanner.EOF)
        {
            // End of line: persist the scanner state for the next line and stop.
            prevLineNumber = prevLineNumber + 1;
            if (lineState.Count == prevLineNumber)
            {
                lineState.Add(scanner.RetrieveState());
            }
            else
            {
                lineState[prevLineNumber] = scanner.RetrieveState();
            }
            isScanningLine = false;
            return(false);
        }
    }
    catch (ScannerError)
    {
        isScanningLine = false;
        return(false);
    }
    if (token == null || token.Type == TokenType.Error)
    {
        return(false);
    }
    // Map the token type to a colorization class (defaults to XmlText).
    NVelocityTokenColor color = NVelocityTokenColor.XmlText;
    switch (token.Type)
    {
        // +==========================+
        // |     NVelocity Tokens     |
        // +==========================+
        case TokenType.NVText:
        case TokenType.NVEscapeDirective:
        case TokenType.NVComma:
        case TokenType.NVDoubleDot:
            color = NVelocityTokenColor.NVText;
            break;
        case TokenType.NVTrue:
        case TokenType.NVFalse:
        case TokenType.NVIn:
        case TokenType.NVWith:
            color = NVelocityTokenColor.NVKeyword;
            break;
        case TokenType.NVSingleLineComment:
        case TokenType.NVMultilineCommentStart:
        case TokenType.NVMultilineCommentEnd:
        case TokenType.NVMultilineComment:
            color = NVelocityTokenColor.NVComment;
            break;
        case TokenType.NVDollar:
        case TokenType.NVIdentifier:
        case TokenType.NVReferenceLCurly:
        case TokenType.NVReferenceRCurly:
        case TokenType.NVReferenceSilent:
        case TokenType.NVDot:
            color = NVelocityTokenColor.NVIdentifier;
            break;
        case TokenType.NVStringLiteral:
        case TokenType.NVDoubleQuote:
        case TokenType.NVSingleQuote:
            color = NVelocityTokenColor.NVString;
            break;
        case TokenType.NVIntegerLiteral:
        //case TokenType.NVFloatingPoint:
            color = NVelocityTokenColor.NVNumber;
            break;
        case TokenType.NVDirectiveHash:
        case TokenType.NVDirectiveName:
        case TokenType.NVDirectiveLParen:
        case TokenType.NVDirectiveRParen:
            color = NVelocityTokenColor.NVDirective;
            break;
        case TokenType.NVEq:
        case TokenType.NVLte:
        case TokenType.NVLt:
        case TokenType.NVGt:
        case TokenType.NVGte:
        case TokenType.NVEqEq:
        case TokenType.NVNeq:
        case TokenType.NVPlus:
        case TokenType.NVMinus:
        case TokenType.NVMul:
        case TokenType.NVDiv:
        case TokenType.NVMod:
        case TokenType.NVAnd:
        case TokenType.NVOr:
        case TokenType.NVNot:
            color = NVelocityTokenColor.NVOperator;
            break;
        case TokenType.NVLParen:
        case TokenType.NVRParen:
        case TokenType.NVLBrack:
        case TokenType.NVRBrack:
        case TokenType.NVLCurly:
        case TokenType.NVRCurly:
            color = NVelocityTokenColor.NVBracket;
            break;
        case TokenType.NVDictionaryPercent:
        case TokenType.NVDictionaryLCurly:
        case TokenType.NVDictionaryRCurly:
            color = NVelocityTokenColor.NVDictionaryDelimiter;
            break;
        case TokenType.NVDictionaryKey:
            color = NVelocityTokenColor.NVDictionaryKey;
            break;
        case TokenType.NVDictionaryEquals:
            color = NVelocityTokenColor.NVDictionaryEquals;
            break;
        // +====================+
        // |     XML Tokens     |
        // +====================+
        case TokenType.XmlText:
            color = NVelocityTokenColor.XmlText;
            break;
        case TokenType.XmlComment:
        case TokenType.XmlCommentStart:
        case TokenType.XmlCommentEnd:
            color = NVelocityTokenColor.XmlComment;
            break;
        case TokenType.XmlTagName:
            color = NVelocityTokenColor.XmlTagName;
            break;
        case TokenType.XmlAttributeName:
            color = NVelocityTokenColor.XmlAttributeName;
            break;
        case TokenType.XmlAttributeText:
            color = NVelocityTokenColor.XmlAttributeValue;
            break;
        case TokenType.XmlTagStart:
        case TokenType.XmlTagEnd:
        case TokenType.XmlCDataStart:
        case TokenType.XmlCDataEnd:
            color = NVelocityTokenColor.XmlTagDelimiter;
            break;
        case TokenType.XmlForwardSlash:
        case TokenType.XmlQuestionMark:
        case TokenType.XmlExclaimationMark:
        case TokenType.XmlEquals:
        case TokenType.XmlDoubleQuote:
            color = NVelocityTokenColor.XmlOperator;
            break;
        //case ???
        //	color = NVelocityTokenColor.XmlEntity;
        //	break;
        case TokenType.XmlCDataSection:
            color = NVelocityTokenColor.XmlCDataSection;
            break;
        //case ???
        //	color = NVelocityTokenColor.XmlProcessingInstruction;
        //	break;
    }
    tokenInfo.Color = (TokenColor)color;
    // Scanner positions are 1-based; TokenInfo indices are 0-based, end inclusive.
    tokenInfo.StartIndex = token.Position.StartPos - 1;
    tokenInfo.EndIndex = token.Position.EndPos - 2;
    // Set the MemberSelect trigger on IntelliSense Member Completion characters
    switch (token.Type)
    {
        case TokenType.NVDollar:
        case TokenType.NVDot:
        case TokenType.NVDirectiveHash:
        case TokenType.NVDirectiveLParen:
        case TokenType.XmlTagStart:
        case TokenType.XmlAttributeMemberSelect:
            tokenInfo.Trigger = TokenTriggers.MemberSelect;
            break;
        case TokenType.NVLParen:
        case TokenType.NVRParen:
            tokenInfo.Trigger = TokenTriggers.ParameterEnd | TokenTriggers.MatchBraces;
            break;
        case TokenType.NVComma:
            tokenInfo.Trigger = TokenTriggers.ParameterNext;
            break;
    }
    return(true);
}
/// <summary>
/// Scans an interpolated string literal by delegating to a nested
/// InterpolatedStringScanner, then surfaces its error and the raw token text.
/// </summary>
internal void ScanInterpolatedStringLiteralTop(ArrayBuilder<Interpolation> interpolations, bool isVerbatim, ref TokenInfo info, ref SyntaxDiagnosticInfo error, out bool closeQuoteMissing)
{
    var scanner = new InterpolatedStringScanner(this, isVerbatim);
    scanner.ScanInterpolatedStringLiteralTop(interpolations, ref info, out closeQuoteMissing);
    error = scanner.error;
    info.Text = TextWindow.GetText(false);
}
/// <summary>
/// Scans a regular string ("...") or character ('x') literal at the current
/// position, filling <paramref name="info"/> with kind, text, and value. Sets
/// Kind = None when the current character is not a quote.
/// </summary>
/// <param name="info">Receives the token kind, raw text, and string/char value.</param>
/// <param name="allowEscapes">When false, backslash is treated as an ordinary character.</param>
private void ScanStringLiteral(ref TokenInfo info, bool allowEscapes = true)
{
    var quoteCharacter = TextWindow.PeekChar();
    if (quoteCharacter == '\'' || quoteCharacter == '"')
    {
        TextWindow.AdvanceChar();
        _builder.Length = 0;
        while (true)
        {
            char ch = TextWindow.PeekChar();
            if (ch == '\\' && allowEscapes)
            {
                // normal string & char constants can have escapes
                char c2;
                ch = this.ScanEscapeSequence(out c2);
                _builder.Append(ch);
                if (c2 != SlidingTextWindow.InvalidCharacter)
                {
                    // Surrogate pair: the escape produced two UTF-16 code units.
                    _builder.Append(c2);
                }
            }
            else if (ch == quoteCharacter)
            {
                // Matching close quote ends the literal.
                TextWindow.AdvanceChar();
                break;
            }
            else if (SyntaxFacts.IsNewLine(ch) || (ch == SlidingTextWindow.InvalidCharacter && TextWindow.IsReallyAtEnd()))
            {
                //String and character literals can contain any Unicode character. They are not limited
                //to valid UTF-16 characters. So if we get the SlidingTextWindow's sentinel value,
                //double check that it was not real user-code contents. This will be rare.
                Debug.Assert(TextWindow.Width > 0);
                this.AddError(ErrorCode.ERR_NewlineInConst);
                break;
            }
            else
            {
                TextWindow.AdvanceChar();
                _builder.Append(ch);
            }
        }
        info.Text = TextWindow.GetText(true);
        if (quoteCharacter == '\'')
        {
            info.Kind = SyntaxKind.CharacterLiteralToken;
            // A char literal must contain exactly one character.
            if (_builder.Length != 1)
            {
                this.AddError((_builder.Length != 0) ? ErrorCode.ERR_TooManyCharsInConst : ErrorCode.ERR_EmptyCharConst);
            }
            if (_builder.Length > 0)
            {
                info.StringValue = TextWindow.Intern(_builder);
                info.CharValue = info.StringValue[0];
            }
            else
            {
                info.StringValue = string.Empty;
                info.CharValue = SlidingTextWindow.InvalidCharacter;
            }
        }
        else
        {
            info.Kind = SyntaxKind.StringLiteralToken;
            if (_builder.Length > 0)
            {
                info.StringValue = TextWindow.Intern(_builder);
            }
            else
            {
                info.StringValue = string.Empty;
            }
        }
    }
    else
    {
        // Not positioned on a quote: no literal here.
        info.Kind = SyntaxKind.None;
        info.Text = null;
    }
}
/// <summary>
/// Scans a verbatim string literal (@"...") at the current position, filling
/// <paramref name="info"/> with kind, text, and value. Doubled quotes ("") are
/// unescaped to a single quote. Sets Kind = None when not positioned on @".
/// </summary>
/// <param name="info">Receives the token kind, raw text, and string value.</param>
/// <param name="allowNewlines">When false, a newline terminates the literal with an error.</param>
private void ScanVerbatimStringLiteral(ref TokenInfo info, bool allowNewlines = true)
{
    // Reset the shared value builder once (the original cleared it twice).
    _builder.Length = 0;
    if (TextWindow.PeekChar() == '@' && TextWindow.PeekChar(1) == '"')
    {
        TextWindow.AdvanceChar(2);
        bool done = false;
        char ch;
        while (!done)
        {
            switch (ch = TextWindow.PeekChar())
            {
                case '"':
                    TextWindow.AdvanceChar();
                    if (TextWindow.PeekChar() == '"')
                    {
                        // Doubled quote -- skip & put the single quote in the string
                        TextWindow.AdvanceChar();
                        _builder.Append(ch);
                    }
                    else
                    {
                        done = true;
                    }
                    break;
                case SlidingTextWindow.InvalidCharacter:
                    if (!TextWindow.IsReallyAtEnd())
                    {
                        // Sentinel char appeared in real source text; treat as content.
                        goto default;
                    }
                    // Reached the end of the source without finding the end-quote. Give
                    // an error back at the starting point.
                    this.AddError(ErrorCode.ERR_UnterminatedStringLit);
                    done = true;
                    break;
                default:
                    if (!allowNewlines && SyntaxFacts.IsNewLine(ch))
                    {
                        this.AddError(ErrorCode.ERR_UnterminatedStringLit);
                        done = true;
                        break;
                    }
                    TextWindow.AdvanceChar();
                    _builder.Append(ch);
                    break;
            }
        }
        info.Kind = SyntaxKind.StringLiteralToken;
        info.Text = TextWindow.GetText(false);
        info.StringValue = _builder.ToString();
    }
    else
    {
        info.Kind = SyntaxKind.None;
        info.Text = null;
        info.StringValue = null;
    }
}
/// <summary>
/// Returns the precomputed declarations, regardless of position or parse reason.
/// </summary>
public override Declarations GetDeclarations(Microsoft.VisualStudio.TextManager.Interop.IVsTextView view, int line, int col, TokenInfo info, ParseReason reason)
{
    return myDecls;
}
/// <summary>
/// Returns token information for the character immediately before the caret in
/// the given view (a default TokenInfo when the line has no token data).
/// </summary>
public TokenInfo GetTokenInfo(IVsTextView textView)
{
    int caretLine, caretColumn;
    textView.GetCaretPos(out caretLine, out caretColumn);
    TokenInfo result = new TokenInfo();
    // Colorizer supplies per-token info for the caret's line.
    TokenInfo[] tokens = this.colorizer.GetLineInfo(caretLine, this.colorState);
    if (tokens != null)
    {
        // The caret sits after the character of interest, hence the -1.
        GetTokenInfoAt(tokens, caretColumn - 1, ref result);
    }
    return result;
}
/// <summary>
/// Creates a RestRequest configured with the shared timeout and the supplied
/// token, body, URL segments, and headers.
/// </summary>
/// <param name="resource">Resource URL.</param>
/// <param name="method">HTTP method.</param>
/// <param name="parameterJson">Optional JSON body for POST/PUT/DELETE.</param>
/// <param name="token">Optional authentication token.</param>
/// <param name="urlSegments">Optional URL segment parameters.</param>
/// <param name="headers">Optional request headers.</param>
/// <returns>The configured RestRequest.</returns>
private static RestRequest CreateRequest(string resource, Method method, string parameterJson, TokenInfo token = null, UrlSegment[] urlSegments = null, Header[] headers = null)
{
    var request = new RestRequest(resource, method);
    request.Timeout = Options.timeOut;
    return AddToRequest(request, token, parameterJson, urlSegments, headers);
}
/// <summary>
/// Decides whether a token qualifies for word-extent matching: with
/// WORDEXT_FINDTOKEN set, anything except comments/embedded text qualifies;
/// otherwise only word-like tokens (keyword, identifier, string, literal) do.
/// </summary>
static bool MatchToken(WORDEXTFLAGS flags, TokenInfo info)
{
    bool findAnyToken = (flags & WORDEXTFLAGS.WORDEXT_FINDTOKEN) != 0;
    if (findAnyToken)
    {
        switch (info.type)
        {
            case TokenType.Comment:
            case TokenType.LineComment:
            case TokenType.Embedded:
                return false;
            default:
                return true;
        }
    }
    return info.type == TokenType.Keyword
        || info.type == TokenType.Identifier
        || info.type == TokenType.String
        || info.type == TokenType.Literal;
}
/// <summary>
/// Persists the token to TokenFilePath as JSON so it can be reloaded later.
/// </summary>
private static void SaveToken(TokenInfo token)
{
    var serialized = JsonConvert.SerializeObject(token);
    File.WriteAllText(TokenFilePath, serialized);
}
/// <summary>
/// Kicks off a background parse to show a method tip at the given position.
/// </summary>
public virtual void MethodTip(IVsTextView textView, int line, int idx, TokenInfo info)
{
    var handler = new ParseResultHandler(this.HandleMethodTipResponse);
    this.BeginParse(line, idx, info, ParseReason.MethodTip, textView, handler);
}
/// <summary>
/// Gathers the source text appropriate for the parse reason and hands the parse
/// request to the language service with the given completion callback.
/// </summary>
internal void BeginParse(int line, int idx, TokenInfo info, ParseReason reason, IVsTextView view, ParseResultHandler callback)
{
    string text;
    switch (reason)
    {
        case ParseReason.MemberSelect:
        case ParseReason.MethodTip:
            // Text up to (but not including) the current line.
            text = this.GetTextUpToLine(line);
            break;
        case ParseReason.CompleteWord:
        case ParseReason.QuickInfo:
            // Include the current line as well.
            text = this.GetTextUpToLine(line + 1);
            break;
        default:
            text = this.GetTextUpToLine(0); // get all the text.
            break;
    }
    string fname = this.GetFilePath();
    this.service.BeginParse(new ParseRequest(line, idx, info, text, fname, reason, view), callback);
}
/// <summary>
/// Scans the top level of an interpolated string literal via a dedicated
/// sub-scanner, propagating its error and capturing the raw token text.
/// </summary>
internal void ScanInterpolatedStringLiteralTop(ArrayBuilder<Interpolation> interpolations, bool isVerbatim, ref TokenInfo info, ref SyntaxDiagnosticInfo error, out bool closeQuoteMissing)
{
    var stringScanner = new InterpolatedStringScanner(this, isVerbatim);
    stringScanner.ScanInterpolatedStringLiteralTop(interpolations, ref info, out closeQuoteMissing);
    error = stringScanner.error;
    info.Text = TextWindow.GetText(false);
}
/// <summary>
/// Registers (or replaces) a token's metadata in the script table, keyed by symbol.
/// </summary>
private static void AddToken(string symbol, string hash, string name, int decimals)
{
    var tokenInfo = new TokenInfo();
    tokenInfo.symbol = symbol;
    tokenInfo.hash = hash;
    tokenInfo.name = name;
    tokenInfo.decimals = decimals;
    _tokenScripts[symbol] = tokenInfo;
}
/// <summary>
/// Creates a ParseRequest describing a parse of <paramref name="sourceText"/> at
/// the given position for the given reason.
/// NOTE(review): this is a bodyless virtual declaration (reference-assembly
/// style); it will not compile as-is in an ordinary class — confirm the real
/// implementation exists elsewhere.
/// </summary>
public virtual ParseRequest CreateParseRequest(Source s, int line, int idx, TokenInfo info, string sourceText, string fname, ParseReason reason, IVsTextView view);
/// <summary>
/// Issues an authorized GET to the YouTube Data API and returns the response
/// body as a string.
/// </summary>
/// <param name="googleAuthorization">OAuth token supplying the bearer access token.</param>
/// <param name="httpClient">Client used to send the request.</param>
/// <param name="uri">Absolute request URI.</param>
/// <returns>The raw response body.</returns>
private string MakeYoutubeGetRequest(TokenInfo googleAuthorization, HttpClient httpClient, string uri)
{
    // Dispose the request message (the original leaked it).
    using (var request = new HttpRequestMessage(HttpMethod.Get, uri))
    {
        request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", googleAuthorization.access_token);
        // NOTE(review): sync-over-async (.Result) can deadlock on a UI/ASP.NET
        // context; callers are synchronous, so the signature is kept as-is.
        var response = httpClient.SendAsync(request).Result;
        return response.Content.ReadAsStringAsync().Result;
    }
}
/// <summary>
/// Maps a token to a classification span within <paramref name="span"/>:
/// special-cases dots, open/close groupings, and commas by trigger flags,
/// then falls back to the category map. Returns null when the token does not
/// intersect the requested span or has no classification.
/// </summary>
private ClassificationSpan ClassifyToken(SnapshotSpan span, TokenInfo token, int lineNumber)
{
    IClassificationType classification = null;
    if (token.Category == TokenCategory.Operator)
    {
        if (token.Trigger == TokenTriggers.MemberSelect)
        {
            classification = _provider.DotClassification;
        }
    }
    else if (token.Category == TokenCategory.Grouping)
    {
        // NOTE: these are exact-equality comparisons on the combined flag values,
        // not bit tests — a trigger with extra flags set will not match.
        if (token.Trigger == (TokenTriggers.MatchBraces | TokenTriggers.ParameterStart))
        {
            classification = _provider.OpenGroupingClassification;
        }
        else if (token.Trigger == (TokenTriggers.MatchBraces | TokenTriggers.ParameterEnd))
        {
            classification = _provider.CloseGroupingClassification;
        }
    }
    else if (token.Category == TokenCategory.Delimiter)
    {
        if (token.Trigger == TokenTriggers.ParameterNext)
        {
            classification = _provider.CommaClassification;
        }
    }
    // Fall back to the per-category classification when no special case applied.
    if (classification == null)
    {
        CategoryMap.TryGetValue(token.Category, out classification);
    }
    if (classification != null)
    {
        // Convert the token's (line, 1-based column) span to an absolute span.
        var line = span.Snapshot.GetLineFromLineNumber(lineNumber);
        var index = line.Start.Position + token.SourceSpan.Start.Column - 1;
        var tokenSpan = new Span(index, token.SourceSpan.Length);
        var intersection = span.Intersection(tokenSpan);
        if (intersection != null && intersection.Value.Length > 0)
        {
            return new ClassificationSpan(new SnapshotSpan(span.Snapshot, tokenSpan), classification);
        }
    }
    return null;
}
/// <summary>
/// Maps a token to a classification span within <paramref name="span"/>:
/// special-cases dots, brace groupings, and commas by trigger flags, then falls
/// back to the category map. Returns null when the token does not intersect the
/// requested span or has no classification.
/// </summary>
private ClassificationSpan ClassifyToken(SnapshotSpan span, TokenInfo token, int lineNumber)
{
    IClassificationType classification = null;
    switch (token.Category)
    {
        case TokenCategory.Operator:
            if (token.Trigger == TokenTriggers.MemberSelect)
            {
                classification = _provider.DotClassification;
            }
            break;
        case TokenCategory.Grouping:
            if ((token.Trigger & TokenTriggers.MatchBraces) != 0)
            {
                classification = _provider.GroupingClassification;
            }
            break;
        case TokenCategory.Delimiter:
            if (token.Trigger == TokenTriggers.ParameterNext)
            {
                classification = _provider.CommaClassification;
            }
            break;
    }
    if (classification == null)
    {
        CategoryMap.TryGetValue(token.Category, out classification);
    }
    if (classification == null)
    {
        return null;
    }
    var tokenSpan = SnapshotSpanToSpan(span.Snapshot, token, lineNumber);
    var intersection = span.Intersection(tokenSpan);
    bool overlaps = intersection != null && intersection.Value.Length > 0;
    // handle zero-length spans which Intersect and Overlap won't return true on ever.
    bool zeroLengthHit = span.Length == 0 && tokenSpan.Contains(span.Start);
    if (overlaps || zeroLengthHit)
    {
        return new ClassificationSpan(new SnapshotSpan(span.Snapshot, tokenSpan), classification);
    }
    return null;
}
/// <summary>
/// Finds the token whose [startIndex, endIndex] range contains <paramref name="col"/>,
/// copies it into <paramref name="info"/>, and returns its array index (-1 when
/// no token contains the column).
/// </summary>
public int GetTokenInfoAt(TokenInfo[] infos, int col, ref TokenInfo info)
{
    int count = infos.Length;
    for (int i = 0; i < count; i++)
    {
        int start = infos[i].startIndex; // 1-based to zero based.
        int end = infos[i].endIndex; // 1-based to zero based.
        // Column precedes the first token: nothing can contain it.
        if (i == 0 && start > col)
        {
            return -1;
        }
        if (col >= start && col <= end)
        {
            info = infos[i];
            return i;
        }
    }
    return -1;
}
/// <summary>
/// Enumerates every symbol visible in the global scope into the acceptor:
/// table/column refs, tables, constants, aggregates, functions, parameters,
/// keywords, and table relations between in-scope table refs.
/// </summary>
/// <param name="acceptor">Receives each enumerated symbol; must not be null.</param>
public override void Enumerate(IMemberCompletionAcceptor acceptor)
{
    if (acceptor == null)
    {
        throw ExceptionBuilder.ArgumentNull("acceptor");
    }
    // Enumerate the complete global scope
    //
    // 1. Enumerate all table refs and column refs as they are part
    //    of the query scope.
    foreach (TableRefBinding tableRefBinding in _queryScope.GetAllTableRefBindings())
    {
        acceptor.AcceptTableRef(tableRefBinding);
    }
    foreach (ColumnRefBinding columnRefBinding in _queryScope.GetAllColumnRefBindings())
    {
        acceptor.AcceptColumnRef(columnRefBinding);
    }
    // 2. Enumerate all tables, constants, aggregates and functions
    foreach (TableBinding table in _scope.DataContext.Tables)
    {
        acceptor.AcceptTable(table);
    }
    foreach (ConstantBinding constant in _scope.DataContext.Constants)
    {
        acceptor.AcceptConstant(constant);
    }
    foreach (AggregateBinding aggregateBinding in _scope.DataContext.Aggregates)
    {
        acceptor.AcceptAggregate(aggregateBinding);
    }
    foreach (FunctionBinding functionBinding in _scope.DataContext.Functions)
    {
        acceptor.AcceptFunction(functionBinding);
    }
    // 3. Enumerate parameters
    foreach (ParameterBinding parameter in _scope.Parameters)
    {
        acceptor.AcceptParameter(parameter);
    }
    // 4. Enumerate keywords (every TokenId flagged as a keyword)
    TokenId[] tokenIDs = (TokenId[])Enum.GetValues(typeof(TokenId));
    foreach (TokenId tokenID in tokenIDs)
    {
        TokenInfo info = TokenInfo.FromTokenId(tokenID);
        if (info.IsKeyword)
        {
            acceptor.AcceptKeyword(info.Text);
        }
    }
    // 5. Enumerate relations
    TableRefBinding[] tableRefBindings = _queryScope.GetAllTableRefBindings();
    foreach (TableRefBinding parentTableRef in tableRefBindings)
    {
        IList<TableRelation> relations = _scope.DataContext.TableRelations.GetChildRelations(parentTableRef.TableBinding);
        foreach (TableRelation relation in relations)
        {
            // Resolve the table on the *other* side of the relation.
            TableBinding childTable = (relation.ParentTable == parentTableRef.TableBinding) ? relation.ChildTable : relation.ParentTable;
            // Find an in-scope table ref bound to that table, if any.
            TableRefBinding childTableRef = null;
            foreach (TableRefBinding tableRefBinding in tableRefBindings)
            {
                if (tableRefBinding.TableBinding == childTable)
                {
                    childTableRef = tableRefBinding;
                    break;
                }
            }
            if (childTableRef != null)
            {
                acceptor.AcceptRelation(parentTableRef, childTableRef, relation);
            }
        }
    }
}
/// <summary>
/// Scans an interpolated string literal token ($"..." or $@"...") at the current
/// position into <paramref name="info"/>, reporting any scan error.
/// </summary>
/// <param name="isVerbatim">True for the $@"..." form.</param>
/// <param name="info">Receives the token's kind and text.</param>
private void ScanInterpolatedStringLiteral(bool isVerbatim, ref TokenInfo info)
{
    // We have a string of the form
    //                $" ... "
    // or, if isVerbatim is true, of the form
    //                $@" ... "
    // Where the contents contains zero or more sequences
    //                { STUFF }
    // where these curly braces delimit STUFF in expression "holes".
    // In order to properly find the closing quote of the whole string,
    // we need to locate the closing brace of each hole, as strings
    // may appear in expressions in the holes. So we
    // need to match up any braces that appear between them.
    // But in order to do that, we also need to match up any
    // /**/ comments, ' characters quotes, () parens
    // [] brackets, and "" strings, including interpolated holes in the latter.
    SyntaxDiagnosticInfo error = null;
    bool closeQuoteMissing;
    // Interpolations are not collected here (null): only kind/text are needed.
    ScanInterpolatedStringLiteralTop(null, isVerbatim, ref info, ref error, out closeQuoteMissing);
    this.AddError(error);
}
/// <summary>
/// Captures the tokens produced for one line together with the tokenizer state
/// at the end of that line.
/// </summary>
public LineTokenization(TokenInfo[] tokens, object state)
{
    State = state;
    Tokens = tokens;
}
/// <summary>
/// Scans the full interpolated string token starting at '$' (or '$@' when
/// verbatim): consumes the opening quote, scans the contents (collecting
/// interpolation holes), and consumes the closing quote if present — otherwise
/// records an unterminated-string/newline error.
/// </summary>
/// <param name="interpolations">Receives the interpolation hole spans (may be null per callers).</param>
/// <param name="info">Receives the token kind (InterpolatedStringToken).</param>
/// <param name="closeQuoteMissing">Set true when the closing quote was not found.</param>
internal void ScanInterpolatedStringLiteralTop(ArrayBuilder<Interpolation> interpolations, ref TokenInfo info, out bool closeQuoteMissing)
{
    Debug.Assert(lexer.TextWindow.PeekChar() == '$');
    lexer.TextWindow.AdvanceChar(); // $
    if (isVerbatim)
    {
        Debug.Assert(lexer.TextWindow.PeekChar() == '@');
        lexer.TextWindow.AdvanceChar(); // @
    }
    Debug.Assert(lexer.TextWindow.PeekChar() == '"');
    lexer.TextWindow.AdvanceChar(); // "
    ScanInterpolatedStringLiteralContents(interpolations);
    if (lexer.TextWindow.PeekChar() != '"')
    {
        // Unterminated literal: report only the first error for this token.
        Debug.Assert(IsAtEnd());
        if (error == null)
        {
            int position = IsAtEnd(true) ? lexer.TextWindow.Position - 1 : lexer.TextWindow.Position;
            error = lexer.MakeError(position, 1, isVerbatim ? ErrorCode.ERR_UnterminatedStringLit : ErrorCode.ERR_NewlineInConst);
        }
        closeQuoteMissing = true;
    }
    else
    {
        // found the closing quote
        lexer.TextWindow.AdvanceChar(); // "
        closeQuoteMissing = false;
    }
    info.Kind = SyntaxKind.InterpolatedStringToken;
}
/// <summary>
/// Validates the posted token for the given login via the user service and
/// returns the result with a 200 response.
/// </summary>
public ActionResult<bool> Validate([FromBody] TokenInfo tokenInfo)
{
    var isValid = _userService.ValidateToken(tokenInfo.Login, tokenInfo.Token);
    return Ok(isValid);
}