// Parses an assignment statement (e.g. "x := 5;") and returns its intermediate-code subtree.
// The grammar handled here is deliberately simplified: the right-hand side is assumed to be
// a single integer constant rather than a full expression.
public override Mediator.Intermediate.ICodeNode Parse(Tokens.Token token) {
    ICodeNode assignNode = IntermediateCodeFactory.CreateICodeNode("assign");
    // Target variable: look it up in the symbol-table stack, entering a local on first use.
    string targetName = token.Text;
    ISymbolTableEntry symId = Stack.Find(targetName);
    if (symId == null) symId = Stack.EnterLocal(targetName);
    symId.AppendLineNumber(token.LineNumber);
    token = NextToken();
    ICodeNode variable = IntermediateCodeFactory.CreateICodeNode("variable");
    variable.SetAttribute("id", symId);
    assignNode.AddChild(variable);
    if (token.TokenType == "colon_equals") {
        token = NextToken(); // advance to the right-hand side (the value being assigned)
    } else {
        // ":=" missing: flag the error but continue parsing with the current token.
        ErrorHandler.Singleton.Flag(token, "missing_colon_equals", this);
    }
    // Simplification: assume the right-hand side is a constant integer value.
    ICodeNode constant = IntermediateCodeFactory.CreateICodeNode("int_const");
    constant.SetAttribute("value", token.Value);
    assignNode.AddChild(constant);
    NextToken(); // consume the trailing ';'
    return assignNode;
}
// Static initializer: assigns a random non-zero 64-bit code to every ASCII letter
// (A-Z, a-z) and the space character, indexed by the character's code point.
// The table is sized 300 so any single-byte character code fits with room to spare.
static HashingStringMatch() {
    tokens = new Tokens[300];
    Random r = new Random();
    //1. Build codes for each token
    for (char c = 'A'; c <= 'Z'; c++) {
        Tokens t = new Tokens(c, Convert.ToUInt64(r.Next(1, Int32.MaxValue)));
        tokens[Convert.ToInt32(c)] = t;
    }
    for (char c = 'a'; c <= 'z'; c++) {
        Tokens t = new Tokens(c, Convert.ToUInt64(r.Next(1, Int32.MaxValue)));
        tokens[Convert.ToInt32(c)] = t;
    }
    tokens[Convert.ToInt32(' ')] = new Tokens(' ', (UInt64)r.Next(1, Int32.MaxValue));
    // Punctuation support and fixed test codes, currently disabled:
    //tokens[Convert.ToInt32('\'')] = new Tokens('\'', (UInt64)r.Next(1, Int32.MaxValue));
    //tokens[Convert.ToInt32('"')] = new Tokens('"', (UInt64)r.Next(1, Int32.MaxValue));
    //tokens[Convert.ToInt32('.')] = new Tokens('.', (UInt64)r.Next(1, Int32.MaxValue));
    //tokens[Convert.ToInt32(',')] = new Tokens(',', (UInt64)r.Next(1, Int32.MaxValue));
    //tokens[Convert.ToInt32('!')] = new Tokens('!', (UInt64)r.Next(1, Int32.MaxValue));
    //tokens[Convert.ToInt32('?')] = new Tokens('?', (UInt64)r.Next(1, Int32.MaxValue));
    //tokens[Convert.ToInt32('-')] = new Tokens('-', (UInt64)r.Next(1, Int32.MaxValue));
    //tokens[Convert.ToInt32(':')] = new Tokens(':', (UInt64)r.Next(1, Int32.MaxValue));
    //tokens[Convert.ToInt32('t')] = new Tokens('t', 7);
    //tokens[Convert.ToInt32('e')] = new Tokens('e', 8);
    //tokens[Convert.ToInt32('s')] = new Tokens('s', 9);
    //tokens[Convert.ToInt32('a')] = new Tokens('t', 5);
}
/// <summary>
/// Maps an operator token to its canonical source spelling; any token without a
/// special spelling falls back to its enum name via ToString().
/// </summary>
internal static string intern(Tokens token) {
    return token switch {
        Tokens.tDOT2 => "..",
        Tokens.tDOT3 => "...",
        Tokens.tPOW => "**",
        Tokens.tUPLUS => "+@",
        Tokens.tUMINUS => "-@",
        Tokens.tCMP => "<=>",
        Tokens.tGEQ => ">=",
        Tokens.tLEQ => "<=",
        Tokens.tEQ => "==",
        Tokens.tEQQ => "===",
        Tokens.tNEQ => "!=",
        Tokens.tMATCH => "=~",
        Tokens.tNMATCH => "!~",
        Tokens.tAREF => "[]",
        Tokens.tASET => "[]=",
        Tokens.tLSHFT => "<<",
        Tokens.tRSHFT => ">>",
        Tokens.tCOLON2 => "::",
        Tokens.tOROP => "||",
        Tokens.tANDOP => "&&",
        _ => token.ToString(),
    };
}
/// <summary>Builds a <c>Token</c> of the given kind spanning the given source range.</summary>
/// <remarks>Columns arrive 0-based; <c>SourceLocation</c> expects 1-based, hence the +1.</remarks>
public static Token Maek(Tokens kind, int tokLin, int tokCol, int tokELin, int tokECol) {
    var spanStart = new SourceLocation(1, tokLin, tokCol + 1);
    var spanEnd = new SourceLocation(1, tokELin, tokECol + 1);
    return new Token(kind, new SourceSpan(spanStart, spanEnd));
}
/// <summary>
/// Applies binary operator <paramref name="o"/> to two primitive tokens:
/// comparison operators yield a BooleanToken, arithmetic yields a NumberToken,
/// and CONCAT yields a StringToken.
/// </summary>
/// <exception cref="ArgumentException">Thrown for an unsupported operator.</exception>
protected IPrimitiveToken EvaluateOperation(IPrimitiveToken x, IPrimitiveToken y, Tokens o) => o switch {
    Tokens.EQ => new BooleanToken(x.CompareTo(y) == 0),
    Tokens.NEQ => new BooleanToken(x.CompareTo(y) != 0),
    Tokens.GT => new BooleanToken(x.CompareTo(y) > 0),
    Tokens.GEQ => new BooleanToken(x.CompareTo(y) >= 0),
    Tokens.LT => new BooleanToken(x.CompareTo(y) < 0),
    Tokens.LEQ => new BooleanToken(x.CompareTo(y) <= 0),
    Tokens.PLUS => new NumberToken(x.ToDouble() + y.ToDouble()),
    Tokens.MINUS => new NumberToken(x.ToDouble() - y.ToDouble()),
    Tokens.MULT => new NumberToken(x.ToDouble() * y.ToDouble()),
    Tokens.DIV => new NumberToken(x.ToDouble() / y.ToDouble()),
    Tokens.EXP => new NumberToken(Math.Pow(x.ToDouble(), y.ToDouble())),
    Tokens.CONCAT => new StringToken(x.ToString() + y.ToString()),
    _ => throw new ArgumentException("Unknown Operator"),
};
/// <summary>Advances the tokenizer one token, recording the token, its value and span.</summary>
/// <returns>This instance, for fluent chaining.</returns>
public AssertTokenizer/*!*/ Next() {
    var token = _tokenizer.GetNextToken();
    var value = _tokenizer.TokenValue;
    _actualToken = token;
    _actualValue = value;
    _actualSpan = _tokenizer.TokenSpan;
    // Keep a running history of everything seen so far.
    _allTokens.Add(token);
    _allValues.Add(value);
    return this;
}
/// <summary>
/// Builds a string-valued <c>Token</c>, stripping the first and last characters of
/// <paramref name="value"/> (the surrounding quote characters).
/// </summary>
/// <remarks>Columns arrive 0-based; <c>SourceLocation</c> expects 1-based, hence the +1.</remarks>
public static Token MaekString(Tokens kind, String value, int tokLin, int tokCol, int tokELin, int tokECol) {
    string unquoted = value.Substring(1, value.Length - 2);
    var span = new SourceSpan(
        new SourceLocation(1, tokLin, tokCol + 1),
        new SourceLocation(1, tokELin, tokECol + 1));
    return new Token(kind, unquoted, span);
}
// Test Tokens, TokenEnumerator
static void Main() {
    // Build a tokenizer over the sample sentence, splitting on spaces and hyphens,
    // then print one token per line.
    var tokenizer = new Tokens("This is a well-done program.", new char[] { ' ', '-' });
    foreach (string word in tokenizer) {
        Console.WriteLine(word);
    }
}
/// <summary>
/// Creates a player with the standard starting state: no board token, on space 0,
/// with 1500 in cash.
/// </summary>
public Player(string name, Int16 number) {
    _name = name;
    _number = number;
    _space = 0;
    _cash = 1500;
    _token = Tokens.None;
}
// Test tokens / TokenEnumerator
static void Main() {
    Tokens f = new Tokens("This is a well-done program.", new char [] {' ','-'});
    foreach (string item in f) // change string to int (to observe the type check)
    {
        Console.WriteLine(item);
    }
}
/// <summary>
/// Recursively interprets <paramref name="n"/>, then applies unary operator
/// <paramref name="o"/>: primitive results are evaluated directly, anything else
/// is handed to the complex-operation backend.
/// </summary>
public IToken InterpretExpression(IToken n, Tokens o) {
    n = InterpretExpression(n);
    if (n is IPrimitiveToken primitive) {
        return EvaluateOperation(primitive, o);
    }
    return EvaluateComplexOperation(n, o);
}
// Test tokens / TokenEnumerator
static void Main() {
    // Test Tokens by breaking the string into tokens:
    Tokens f = new Tokens("This is a well-done program.", new char[] {' ','-'});
    foreach (string item in f) {
        Console.WriteLine(item);
    }
}
/// <summary>Aggregates every artifact produced by one compile/evaluate run.</summary>
/// <param name="input">Raw source text that was compiled.</param>
/// <param name="tokens">Token stream produced from the input.</param>
/// <param name="returnVal">Complex value the evaluation returned.</param>
/// <param name="parseTree">Parse tree built from the tokens.</param>
/// <param name="postFixedTokens">Tokens reordered into postfix form.</param>
/// <param name="output">Textual output of the run.</param>
public CompilerOutput(string input, Tokens tokens, System.Numerics.Complex returnVal, ParseTree parseTree, PostfixedTokens postFixedTokens, string output) {
    this.Input = input;
    this.Tokens = tokens;
    this.ReturnValue = returnVal;
    this.ParseTree = parseTree;
    this.PostFixedTokens = postFixedTokens;
    this.Output = output;
}
/// <summary>
/// Groups one token with its live cells, copying the simulation parameters
/// (population size, generation count, neighbourhood size) from the given CA.
/// </summary>
public TokenGroup(CA ca, Tokens token, List<int[]> liveCells) {
    Token = token;
    PopulationSize = ca.PopulationSize;
    NumberOfGens = ca.NumberOfGens;
    NhbdSize = ca.NhbdSize;
    Cells = liveCells;
}
/// <summary>
/// Recursively interprets both operands, then applies binary operator
/// <paramref name="o"/>: two primitive results are evaluated directly, otherwise
/// the complex-operation backend handles the pair.
/// </summary>
public IToken InterpretExpression(IToken x, IToken y, Tokens o) {
    x = InterpretExpression(x);
    y = InterpretExpression(y);
    if (x is IPrimitiveToken px && y is IPrimitiveToken py) {
        return EvaluateOperation(px, py, o);
    }
    return EvaluateComplexOperation(x, y, o);
}
// Test the Tokens class.
static void Main() {
    // Tokenize the sample sentence on spaces and hyphens, printing each token.
    var sentenceTokens = new Tokens("This is a sample sentence.", new char[] { ' ', '-' });
    foreach (string word in sentenceTokens) {
        System.Console.WriteLine(word);
    }
}
/// <summary>
/// Applies unary operator <paramref name="o"/> to a primitive token:
/// PERCENT divides by 100, UMINUS negates.
/// </summary>
/// <exception cref="ArgumentException">Thrown for an unsupported operator.</exception>
protected IPrimitiveToken EvaluateOperation(IPrimitiveToken x, Tokens o) => o switch {
    Tokens.PERCENT => new NumberToken(x.ToDouble() * 0.01),
    Tokens.UMINUS => new NumberToken(x.ToDouble() * -1),
    _ => throw new ArgumentException("Unknown Operator"),
};
// Appends a token's text to the display if the grammar allows it, recording the
// (token, text) pair on the history stack.
// "-" is ambiguous: it may be a binary operation, a unary minus, or an exponent
// sign — the assignments inside the condition deliberately retry it under each
// interpretation, and `token` keeps the interpretation that was accepted.
void addToDisplay(Tokens token, string strToken) {
    // After a result was just shown, any non-operator input starts a fresh calculation.
    if (resultState && token != Tokens.Operation) allClear();
    // Check if token is allowed, if the input is "-" then also check if it can be added as a UnaryMinus or an Exp sign
    if (allowed(token) || strToken == "-" && (allowed(token = Tokens.UnaryMinus) || allowed(token = Tokens.ExpOperator))) {
        Display += strToken;
        history.Push(new Tuple<Tokens, string>(token, strToken));
    }
    resultState = false;
}
/// <summary>
/// Parse-button handler: feeds every character of the script box into the tokenizer,
/// flushes it, binds the resulting content to the grid, and prints the cache.
/// </summary>
private void bParse_Click(object sender, EventArgs e) {
    string script = txtGrid.ScriptBox.Text;
    Tokens tokens = new Tokens();
    foreach (char ch in script) {
        tokens.Add(ch);
    }
    tokens.Flush();
    txtGrid.GridBox.DataSource = tokens.Content;
    tokens.PrintCache();
}
// Scores a document against "good" and "bad" token collections.
// NOTE(review): the expected-value assertion is commented out, so this test
// currently only verifies that Analyzer.Score runs without throwing.
public void Test_Score() {
    TokenCollection good = Create(new[] {"ikea", "kitchen", "mouse"});
    TokenCollection bad = Create(new[] {"house", "stock", "chicken"});
    Processors proc = new Processors();
    // Document consists entirely of "good" tokens, repeated.
    const string document = "ikea kitchen mouse ikea kitchen mouse ikea kitchen mouse ikea kitchen mouse";
    Tokens tokens = new Tokens(document, proc);
    float score = Analyzer.Score(tokens, good, bad);
    //Assert.AreEqual(0.0f, score);
}
// Lowers a unary operation on a non-primitive token into constraint-network form.
// Only unary minus is supported. Note: the IntFunc constructor is invoked purely
// for its side effect of registering the negation relation with the network —
// the instance itself is intentionally discarded.
protected override IToken EvaluateComplexOperation(IToken n, Tokens o) {
    Variable vr;
    Variable vn = ConvertTokenToVariable(n);
    switch(o) {
        case Tokens.UMINUS:
            vr = new IntVariable(_network, "-" + vn.ToString());
            new IntFunc(_network, IntFunc.Negate, vr, vn);
            return new VariableToken(vr);
        default:
            throw new ArgumentException();
    }
}
/// <summary>
/// Parses <paramref name="doc"/> (a docopt usage text) and matches the given argv
/// tokens against it, returning a name-to-value dictionary of parsed arguments.
/// </summary>
/// <param name="doc">Help text containing exactly one "usage:" section.</param>
/// <param name="tokens">Tokenized command line to match.</param>
/// <param name="help">When true, help/version handling in Extras may print and exit.</param>
/// <param name="version">Optional version object passed to Extras.</param>
/// <param name="optionsFirst">When true, options must precede positional arguments.</param>
/// <param name="exit">When true, docopt errors print a message and exit instead of propagating.</param>
/// <returns>Parsed arguments, or null when <paramref name="exit"/> handled an error.</returns>
/// <exception cref="DocoptLanguageErrorException">Zero or multiple "usage:" sections.</exception>
/// <exception cref="DocoptInputErrorException">The argv does not match the usage pattern.</exception>
protected IDictionary<string, ValueObject> Apply(string doc, Tokens tokens, bool help = true, object version = null, bool optionsFirst = false, bool exit = false) {
    try {
        SetDefaultPrintExitHandlerIfNecessary(exit);
        var usageSections = ParseSection("usage:", doc);
        if (usageSections.Length == 0)
            throw new DocoptLanguageErrorException("\"usage:\" (case-insensitive) not found.");
        if (usageSections.Length > 1)
            // NOTE(review): message typo "More that one" kept as-is (runtime string).
            throw new DocoptLanguageErrorException("More that one \"usage:\" (case-insensitive).");
        var exitUsage = usageSections[0];
        var options = ParseDefaults(doc);
        var pattern = ParsePattern(FormalUsage(exitUsage), options);
        var arguments = ParseArgv(tokens, options, optionsFirst);
        var patternOptions = pattern.Flat<Option>().Distinct().ToList();
        // [default] syntax for argument is disabled
        foreach (OptionsShortcut optionsShortcut in pattern.Flat(typeof (OptionsShortcut))) {
            var docOptions = ParseDefaults(doc);
            // Expand each [options] shortcut to all doc options not already in the pattern.
            optionsShortcut.Children = docOptions.Distinct().Except(patternOptions).ToList();
        }
        Extras(help, version, arguments, doc);
        var res = pattern.Fix().Match(arguments);
        if (res.Matched && res.LeftIsEmpty) {
            // Seed the result with pattern defaults, then overwrite with matched values.
            var dict = new Dictionary<string, ValueObject>();
            foreach (var p in pattern.Flat()) {
                dict[p.Name] = p.Value;
            }
            foreach (var p in res.Collected) {
                dict[p.Name] = p.Value;
            }
            return dict;
        }
        throw new DocoptInputErrorException(exitUsage);
    } catch (DocoptBaseException e) {
        if (!exit)
            throw;
        OnPrintExit(e.Message, e.ErrorCode);
        return null;
    }
}
/// <summary>
/// Writes a human-readable dump of one token — terminal name, value payload for
/// value-carrying token kinds, and the tokenizer's lexical state — to
/// <paramref name="output"/>.
/// </summary>
public void DumpTokenDetail(TextWriter/*!*/ output, Tokenizer/*!*/ tokenizer, Tokens token) {
    TokenValue value = tokenizer.TokenValue;
    output.Write("{0}: ", Parser.GetTerminalName((int)token));
    switch (token) {
        default:
            // Most tokens carry no payload; only the name and lexical state are printed.
            break;
        case Tokens.Identifier:
            output.Write(value.String);
            break;
        case Tokens.Float:
            output.Write("{0}D", value.Double);
            break;
        case Tokens.Integer:
            output.Write(value.Integer1);
            break;
        case Tokens.BigInteger:
            output.Write("{0}BI", value.BigInteger.ToString(10));
            break;
        case Tokens.RegexpEnd:
            output.Write("RegexOptions({0})", (RubyRegexOptions)value.Integer1);
            break;
        case Tokens.StringContent:
            // String content may be decoded text or raw bytes depending on the source.
            if (value.StringContent is string) {
                output.Write("String(\"{0}\", {1})", Parser.EscapeString((string)value.StringContent), value.Encoding);
            } else {
                output.Write("String({0}), {1}", BitConverter.ToString((byte[])value.StringContent), value.Encoding);
            }
            break;
        case Tokens.StringBegin:
        case Tokens.RegexpBegin:
        case Tokens.ShellStringBegin:
        case Tokens.SymbolBegin:
            // Opening delimiters print the current heredoc/string sequence.
            output.Write(((Tokenizer.State)tokenizer.CurrentState).CurrentSequence);
            break;
    }
    output.Write(' ');
    output.Write(tokenizer.LexicalState);
    output.WriteLine();
}
// Advances the scanner to the next token, setting _token.
// Returns true when a token was produced; false at end of input ('\0') or after a
// function token ('@'). Whitespace is consumed by looping for another character.
// NOTE(review): the digit and letter branches do not advance `c` here — presumably
// ReadNumber/ReadCellReference consume further input internally; confirm.
public bool ReadNextToken() {
    char c = Read();
    while (true) {
        switch (c) {
            case Char.MinValue: // '\0' marks end of input
                _token = Tokens.Terminator;
                return false;
            case '\t':
            case ' ':
                _token = Tokens.Whitespace;
                c = Read();
                break;
            case '0': case '1': case '2': case '3': case '4':
            case '5': case '6': case '7': case '8': case '9':
                ReadNumber(c);
                break;
            case '+': _token = Tokens.Add; break;
            case '-': _token = Tokens.Subtract; break;
            case '*': _token = Tokens.Multiply; break;
            case '/': _token = Tokens.Divide; break;
            case '@':
                // Function reference — consumed entirely by ReadFunction.
                ReadFunction(c);
                return false;
            default:
                if (Char.IsLetter(c)) ReadCellReference(c);
                break;
        }
        // Only whitespace loops back for another character; anything else is a token.
        if (_token != Tokens.Whitespace) return true;
    }
}
/// <summary>
/// Scanner helper that populates yylval from the current lexeme (yytext) for
/// value-type tokens. If the lexeme cannot be parsed as the requested type — or no
/// value type is specified — the (unquoted) lexeme is stored as a string and
/// STRING is returned.
/// </summary>
/// <param name="type">Requested value type; 'STRING' is the default/fallback.</param>
/// <returns>The token actually set (the requested type on success, otherwise STRING).</returns>
private Tokens SetValue(Tokens type)
{
    // Default: keep the raw lexeme; overwritten below on a successful typed parse.
    yylval.strVal = yytext;
    switch (type)
    {
        case Tokens.STRING:
            yylval.strVal = yytext.Trim('\'', '"');
            return Tokens.STRING;
        case Tokens.INTEGER:
            if (int.TryParse(yytext, out yylval.intVal)) return type;
            break;
        case Tokens.REAL:
            // TryParse instead of Parse + catch(Exception): consistent with the
            // INTEGER case and avoids exceptions for expected parse failures.
            // NumberStyles here match double.Parse's defaults.
            if (double.TryParse(yytext, NumberStyles.Float | NumberStyles.AllowThousands,
                                CultureInfo.InvariantCulture, out yylval.realVal))
                return type;
            break;
        case Tokens.BOOLEAN:
            // Accept dBase-style .T./.F. (case-insensitive) as well as true/false.
            if (string.Equals(yytext, ".t.", StringComparison.OrdinalIgnoreCase)) { yylval.boolVal = true; return type; }
            if (string.Equals(yytext, ".f.", StringComparison.OrdinalIgnoreCase)) { yylval.boolVal = false; return type; }
            if (bool.TryParse(yytext, out yylval.boolVal)) return type;
            break;
        default:
            yylval.strVal = yytext.Trim('\'', '"');
            return Tokens.STRING;
    }
    // Typed parse failed: fall back to the raw lexeme as a string.
    return Tokens.STRING;
}
// Lowers a binary operation on non-primitive tokens into constraint-network form:
// comparison operators produce ConstraintTokens; arithmetic (+, -, *) produces a
// fresh IntVariable bound to the result. The IntComparison/IntArith constructors
// are invoked for their side effect of registering the relation with the network.
protected override IToken EvaluateComplexOperation(IToken x, IToken y, Tokens o) {
    Variable vx = ConvertTokenToVariable(x);
    Variable vy = ConvertTokenToVariable(y);
    Variable vr;
    string vname;
    Constraint c;
    switch(o) {
        case Tokens.EQ:
            return new ConstraintToken(new Equals(_network, vx, vy));
        case Tokens.NEQ:
            return new ConstraintToken(new NotEquals(_network, vx, vy));
        case Tokens.GT:
            c = new IntComparison(_network, IntComparison.Gt, vx, vy);
            return new ConstraintToken(c);
        case Tokens.GEQ:
            c = new IntComparison(_network, IntComparison.Ge, vx, vy);
            return new ConstraintToken(c);
        case Tokens.LT:
            c = new IntComparison(_network, IntComparison.Lt, vx, vy);
            return new ConstraintToken(c);
        case Tokens.LEQ:
            c = new IntComparison(_network, IntComparison.Le, vx, vy);
            return new ConstraintToken(c);
        case Tokens.PLUS:
            // Result variable is named after the expression, e.g. "(a + b)".
            vname = "(" + vx.ToString() + " + " + vy.ToString() + ")";
            vr = new IntVariable(_network, vname);
            new IntArith(_network, IntArith.Add, vr, vx, vy);
            return new VariableToken(vr);
        case Tokens.MINUS:
            vname = "(" + vx.ToString() + " - " + vy.ToString() + ")";
            vr = new IntVariable(_network, vname);
            new IntArith(_network, IntArith.Subtract, vr, vx, vy);
            return new VariableToken(vr);
        case Tokens.MULT:
            vname = "(" + vx.ToString() + " * " + vy.ToString() + ")";
            vr = new IntVariable(_network, vname);
            new IntArith(_network, IntArith.MULTIPLY, vr, vx, vy);
            return new VariableToken(vr);
        default:
            throw new ArgumentException();
    }
}
/// <summary>
/// Creates a new Level for this command, then matches the remaining tokens against
/// the command's parameters in order, tracking the first unmatched parameter so
/// later analysis (and auto-completion) knows where to resume.
/// </summary>
public Level Analyze(Session ses, Tokens tokens) {
    Level lvl = new Level(this);
    ses.Leaf = lvl;
    tokens.SetLevel(lvl);
    // Match parameters
    for (int i = 0; i < Parameters.Count; i++) {
        Parameter p = Parameters[i];
        p.Update(ses);
        MatchResult res = p.Analyze(ses, tokens, lvl);
        // if no tokens left, leave unmatched alone (to match previous params)
        // else, update so that later ones would not go back before the param
        // this also ensures auto completion even if the word is already completed
        // TODO: unmatched might jump back (e.g. "task name -s")
        if (tokens.Count == 0) break;
        if (res == MatchResult.Matched) lvl.FirstUnmatchedParam = i + 1;
        else if (res == MatchResult.Extensible) lvl.FirstUnmatchedParam = i;
        else if (p.Required) // implied "Failed"
        {
            // TODO: handle "required"
            ses.Error(new ArgumentException($"{p} requires a valid value"));
            break;
        }
    }
    // Raise event
    InputChanged?.Invoke(this, new ScriptEventArgs(ses, lvl.Arguments, lvl.Child));
    return lvl;
}
/// <summary>
/// Full analysis pass for raw input: tokenizes it, walks the command scope, builds
/// auto-completion candidates starting at the first unmatched parameter, and
/// auto-launches a command when it is the single standalone completion.
/// </summary>
public void Analyze(Session ses, string input) {
    Tokens tokens = new Tokens(input);
    ses.Input = tokens.Last();
    ses.Root = ses.Scope.Analyze(ses, tokens);
    // If tokens remain unconsumed, treat the first leftover as the active input.
    if (tokens.Count > 0) ses.Input = tokens.First();
    // If script is not loaded, it's time to load now
    ses.Leaf.Command.InitializeScript(ses);
    // Possibly empty - to clear widget when out of scope
    ses.Widget = ses.Leaf.Command.Widget;
    // Auto complete
    ses.Completion = new Completion();
    Level lvl = ses.Input.Level;
    List<Parameter> param = lvl.Command.Parameters;
    for (int i = lvl.FirstUnmatchedParam; i < param.Count; i++) {
        param[i].Complete(ses.Completion, ses.Input.Text, ses.Leaf.Arguments);
        // no auto completion after required parameter
        if (param[i].Required) break;
    }
    // Auto launch
    if (AutoLaunch && ses.Completion.Completions.Count == 1) {
        Command cmd = ses.Completion.Completions[0].Unit as Command;
        if (cmd?.Standalone == true) ses.ExecuteCommand(this, new Level(cmd));
    }
}
/// <summary>Creates the filters API endpoint bound to the given token collection.</summary>
internal Filters(Tokens e) : base(e) { }
/// <summary>
/// <para>Returns current account's mutes.</para>
/// <para>allowed values of exclude_types: "follow", "favourite", "reblog", "mention"</para>
/// <para>Available parameters:</para>
/// <para>- <c>long</c> max_id (optional)</para>
/// <para>- <c>long</c> since_id (optional)</para>
/// <para>- <c>int</c> limit (optional)</para>
/// <para>- <c>IEnumerable&lt;string&gt;</c> exclude_types (required)</para>
/// <para>NOTE(review): exclude_types looks copied from the notifications endpoint — verify it applies to "mutes".</para>
/// </summary>
/// <param name="parameters">The parameters.</param>
/// <returns>
/// <para>The task object representing the asynchronous operation.</para>
/// <para>The Result property on the task object returns the list of account object.</para>
/// </returns>
public Task<Linked<Account>> GetAsync(params Expression<Func<string, object>>[] parameters)
    => Tokens.AccessApiAsync<Linked<Account>>(MethodType.Get, "mutes", Utils.ExpressionToDictionary(parameters));
/// <summary>
/// <para>Mute an account.</para>
/// <para>Available parameters:</para>
/// <para>- <c>long</c> id (required)</para>
/// </summary>
/// <param name="parameters">The parameters.</param>
/// <returns>
/// <para>The task object representing the asynchronous operation.</para>
/// <para>The Result property on the task object returns the relationship object.</para>
/// </returns>
public Task<Relationship> MuteAccountAsync(IDictionary<string, object> parameters)
    => Tokens.AccessParameterReservedApiAsync<Relationship>(MethodType.Post, "accounts/{id}/mute", "id", parameters);
/// <summary>
/// <para>Unmute an account.</para>
/// <para>Available parameters:</para>
/// <para>- <c>long</c> id (required)</para>
/// </summary>
/// <param name="parameters">The parameters.</param>
/// <returns>
/// <para>The task object representing the asynchronous operation.</para>
/// <para>The Result property on the task object returns the relationship object.</para>
/// </returns>
public Task<Relationship> UnmuteAccountAsync(params Expression<Func<string, object>>[] parameters)
    => Tokens.AccessParameterReservedApiAsync<Relationship>(MethodType.Post, "accounts/{id}/unmute", "id", Utils.ExpressionToDictionary(parameters));
/// <summary>Wraps the given <c>Tokens</c> instance for enumeration.</summary>
public TokenEnumerator(Tokens t) => this.t = t;
/// <summary>
/// Creates an anchor node under <paramref name="parent"/>, recording the token
/// and deriving the anchor type from it.
/// </summary>
public Anchor(NodeParent parent, Token token) : base(parent) {
    Tokens.Add(token);
    _anchorType = token.ReadAnchorType();
}
/// <summary>Writes <paramref name="s"/> as the text of an XML element named after the token.</summary>
private void WriteStringElement(Tokens t, object s)
    => xmlWriter.WriteElementString(t.ToString(), s.ToString());
/// <summary>
/// <para>Create a new filter.</para>
/// <para>allowed values of context: "home", "notifications", "public", "thread"</para>
/// <para>Available parameters:</para>
/// <para>- <c>string</c> phrase (required)</para>
/// <para>- <c>IEnumerable&lt;string&gt;</c> context (required)</para>
/// <para>- <c>bool</c> irreversible (optional)</para>
/// <para>- <c>bool</c> whole_word (optional)</para>
/// <para>- <c>int</c> expires_in (optional)</para>
/// </summary>
/// <param name="parameters">The parameters.</param>
/// <returns>
/// <para>The task object representing the asynchronous operation.</para>
/// <para>The Result property on the task object returns the filter object.</para>
/// </returns>
public Task<Filter> PostAsync(params Expression<Func<string, object>>[] parameters)
    => Tokens.AccessApiAsync<Filter>(MethodType.Post, "filters", Utils.ExpressionToDictionary(parameters));
/// <summary>
/// <para>Create a new filter.</para>
/// <para>allowed values of context: "home", "notifications", "public", "thread"</para>
/// <para>Available parameters:</para>
/// <para>- <c>string</c> phrase (required)</para>
/// <para>- <c>IEnumerable&lt;string&gt;</c> context (required)</para>
/// <para>- <c>bool</c> irreversible (optional)</para>
/// <para>- <c>bool</c> whole_word (optional)</para>
/// <para>- <c>int</c> expires_in (optional)</para>
/// </summary>
/// <param name="parameters">The parameters.</param>
/// <returns>
/// <para>The task object representing the asynchronous operation.</para>
/// <para>The Result property on the task object returns the filter object.</para>
/// </returns>
public Task<Filter> PostAsync(IDictionary<string, object> parameters)
    => Tokens.AccessApiAsync<Filter>(MethodType.Post, "filters", parameters);
/// <summary>
/// <para>Text filters the user has configured that potentially must be applied client-side.</para>
/// <para>Available parameters:</para>
/// <para>- No parameters available in this method.</para>
/// </summary>
/// <param name="parameters">The parameters.</param>
/// <returns>
/// <para>The task object representing the asynchronous operation.</para>
/// <para>The Result property on the task object returns the list of filter object.</para>
/// </returns>
public Task<IEnumerable<Filter>> GetAsync(IDictionary<string, object> parameters)
    => Tokens.AccessApiAsync<IEnumerable<Filter>>(MethodType.Get, "filters", parameters);
/// <summary>
/// <para>Text filters the user has configured that potentially must be applied client-side.</para>
/// <para>Available parameters:</para>
/// <para>- No parameters available in this method.</para>
/// </summary>
/// <param name="parameters">The parameters.</param>
/// <returns>
/// <para>The task object representing the asynchronous operation.</para>
/// <para>The Result property on the task object returns the list of filter object.</para>
/// </returns>
public Task<IEnumerable<Filter>> GetAsync(params Expression<Func<string, object>>[] parameters)
    => Tokens.AccessApiAsync<IEnumerable<Filter>>(MethodType.Get, "filters", Utils.ExpressionToDictionary(parameters));
/// <summary>Indexer shorthand for <c>Read</c>: asserts the next token equals <paramref name="token"/>.</summary>
public AssertTokenizer /*!*/ this[Tokens token] => Read(token);
/// <summary>
/// Builds a <see cref="Token"/>.
/// </summary>
/// <param name="tokentype">The <see cref="Tokens">type</see> of the token.</param>
/// <param name="text">The string representation of the token, or null.</param>
/// <returns>Returns a <see cref="Token"/>.</returns>
public static Token Token(Tokens tokentype, string text = null) => new Token(tokentype, text);
/// <summary>
/// Writes <paramref name="val"/> as an XML attribute whose name is the token name
/// with its first character lower-cased.
/// </summary>
void WriteAttribute(Tokens attrKind, object val)
{
    var attributeName = FirstCharToLower(attrKind);
    xmlWriter.WriteAttributeString(attributeName, val.ToString());
}
/// <summary>Opens an XML element named after the token.</summary>
private void WriteStartElement(Tokens t) => xmlWriter.WriteStartElement(t.ToString());
/// <summary>
/// <para>Returns a filter.</para>
/// <para>Available parameters:</para>
/// <para>- <c>long</c> id (required)</para>
/// </summary>
/// <param name="parameters">The parameters.</param>
/// <returns>
/// <para>The task object representing the asynchronous operation.</para>
/// <para>The Result property on the task object returns the filter object.</para>
/// </returns>
public Task<Filter> IdAsync(params Expression<Func<string, object>>[] parameters)
    => Tokens.AccessParameterReservedApiAsync<Filter>(MethodType.Get, "filters/{id}", "id", Utils.ExpressionToDictionary(parameters));
// Emits IR for this expression node.
// A leading "&" pushes the operand's address; a leading "*" dereferences a pointer,
// copying the pointed-to value onto the stack (by register for values <= 4 bytes,
// by memcpy plus stack-pointer adjustment for larger ones); any other first token
// delegates to that token's own emitter.
// NOTE(review): a terminal first token whose value is neither "&" nor "*" emits
// nothing — confirm that case cannot occur here.
public void Emit(CompilationContext context) {
    if (Tokens[0] is DefaultLanguageTerminalToken) {
        var op = ((DefaultLanguageTerminalToken)Tokens[0]).Value;
        if (op == "&") {
            // Address-of: the operand must expose an address.
            ((IHasAddress)Tokens.Last()).PushAddress(context);
        } else if (op == "*") {
            ((ICodeEmitter)Tokens.Last()).Emit(context);
            //Value of pointer -> eax
            context.EmitInstruction(new IRPop() { To = "eax" });
            //TODO: type size
            int valueSize = GetExpressionType(context).GetSize();
            if (valueSize > 4) {
                //[pointer] -> [sp]
                context.EmitInstruction(new IRMemCopy() { From = "eax", To = "sp", Length = new ImmediateValue(valueSize) });
                //sp += size of value
                context.EmitInstruction(new IRMoveImmediate() { To = "ebx", Value = new ImmediateValue(valueSize) });
                context.EmitInstruction(new IRAdd() { Left = "sp", Right = "ebx", To = "sp" });
            } else {
                //value at memory[eax] -> ebx
                context.EmitInstruction(new IRLoadRegister() { From = "eax", To = "ebx", OperandSize = valueSize });
                context.EmitInstruction(new IRPushRegister() { From = "ebx" });
            }
        }
    } else {
        ((ICodeEmitter)Tokens[0]).Emit(context);
    }
}
/// <summary>
/// Computes the indentation (in spaces) for the line following <paramref name="line"/>,
/// based on the baseline text, classified tokens, explicit line joins, grouping
/// brackets, and keyword-specific indent/dedent rules. Uses a reverse parser to walk
/// tokens backwards from the end of <paramref name="line"/> to the start of the
/// enclosing statement.
/// </summary>
private static int CalculateIndentation(string baseline, ITextSnapshotLine line, IEditorOptions options, IClassifier classifier, ITextView textView) {
    int indentation = GetIndentation(baseline, options.GetTabSize());
    int tabSize = options.GetIndentSize();
    var tokens = classifier.GetClassificationSpans(line.Extent);
    if (tokens.Count > 0 && !IsUnterminatedStringToken(tokens[tokens.Count - 1])) {
        // Skip trailing comments and whitespace to find the last significant token.
        int tokenIndex = tokens.Count - 1;
        while (tokenIndex >= 0 &&
               (tokens[tokenIndex].ClassificationType.IsOfType(PredefinedClassificationTypeNames.Comment) ||
                tokens[tokenIndex].ClassificationType.IsOfType(PredefinedClassificationTypeNames.WhiteSpace))) {
            tokenIndex--;
        }
        if (tokenIndex < 0) {
            return(indentation);
        }
        if (Genero4glReverseParser.IsExplicitLineJoin(tokens[tokenIndex])) {
            // explicit line continuation, we indent 1 level for the continued line unless
            // we're already indented because of multiple line continuation characters.
            indentation = GetIndentation(line.GetText(), options.GetTabSize());
            var joinedLine = tokens[tokenIndex].Span.Start.GetContainingLine();
            if (joinedLine.LineNumber > 0) {
                var prevLineSpans = classifier.GetClassificationSpans(tokens[tokenIndex].Span.Snapshot.GetLineFromLineNumber(joinedLine.LineNumber - 1).Extent);
                if (prevLineSpans.Count == 0 || !Genero4glReverseParser.IsExplicitLineJoin(prevLineSpans[prevLineSpans.Count - 1])) {
                    indentation += tabSize;
                }
            } else {
                indentation += tabSize;
            }
            return(indentation);
        }
        string sline = tokens[tokenIndex].Span.GetText();
        var lastChar = sline.Length == 0 ? '\0' : sline[sline.Length - 1];
        // use the expression parser to figure out if we're in a grouping...
        var spans = textView.BufferGraph.MapDownToFirstMatch(
            tokens[tokenIndex].Span,
            SpanTrackingMode.EdgePositive,
            PythonContentTypePrediciate
        );
        if (spans.Count == 0) {
            return(indentation);
        }
        var revParser = new Genero4glReverseParser(
            spans[0].Snapshot,
            spans[0].Snapshot.TextBuffer,
            spans[0].Snapshot.CreateTrackingSpan(
                spans[0].Span,
                SpanTrackingMode.EdgePositive
            )
        );
        // Walk tokens in reverse, building a forward-ordered stack (Insert at 0).
        // A null entry marks a line boundary; stop at the first boundary after a
        // statement keyword has been seen.
        var tokenStack = new List<ClassificationSpan>();
        tokenStack.Insert(0, null);
        bool endAtNextNull = false;
        foreach (var token in revParser) {
            tokenStack.Insert(0, token);
            if (token == null && endAtNextNull) {
                break;
            } else if (token != null && token.ClassificationType == Genero4glClassifierProvider.Keyword) {
                var tok = Tokens.GetToken(token.Span.GetText());
                if (tok != null && Genero4glAst.ValidStatementKeywords.Contains(tok.Kind)) {
                    switch (tok.Kind) {
                        // Handle any tokens that are valid statement keywords in the autocomplete context but not in the "statement start" context
                        case TokenKind.EndKeyword:
                            continue;
                        default:
                            endAtNextNull = true;
                            break;
                    }
                }
            }
        }
        // Forward pass over the collected tokens, tracking grouping depth and the
        // indentation of the line that owns the current statement.
        var indentStack = new System.Collections.Generic.Stack<LineInfo>();
        var current = LineInfo.Empty;
        List<CancelIndent> cancelIndent = null;
        int cancelIndentStartingAt = -1;
        TokenKind firstStatement = TokenKind.EndOfFile;
        TokenKind latestIndentChangeToken = TokenKind.EndOfFile;
        ClassificationSpan firstToken = null;
        for (int i = 0; i < tokenStack.Count; i++) {
            var token = tokenStack[i];
            if (token != null && firstToken == null) {
                firstToken = token;
            }
            if (token == null) {
                // Line boundary: the next token's line establishes the indentation.
                current.NeedsUpdate = true;
            } else if (token.IsOpenGrouping()) {
                // Inside a bracket: align to one past the opening bracket.
                indentStack.Push(current);
                var start = token.Span.Start;
                var line2 = start.GetContainingLine();
                current = new LineInfo {
                    Indentation = start.Position - line2.Start.Position + 1
                };
            } else if (token.IsCloseGrouping()) {
                if (indentStack.Count > 0) {
                    current = indentStack.Pop();
                } else {
                    current.NeedsUpdate = true;
                }
            } else if (Genero4glReverseParser.IsExplicitLineJoin(token)) {
                // Skip the joined continuation tokens entirely.
                while (token != null && i + 1 < tokenStack.Count) {
                    i++;
                    token = tokenStack[i];
                }
            } else if (current.NeedsUpdate == true) {
                var tok = Tokens.GetToken(token.Span.GetText());
                if (tok == null || !Genero4glAst.ValidStatementKeywords.Contains(tok.Kind)) {
                    current.NeedsUpdate = false;
                } else {
                    switch (tok.Kind) {
                        // Handle any tokens that are valid statement keywords in the autocomplete context but not in the "statement start" context
                        case TokenKind.EndKeyword:
                            if (firstStatement != TokenKind.EndOfFile) {
                                current.NeedsUpdate = false;
                            } else {
                                latestIndentChangeToken = tok.Kind;
                            }
                            break;
                        default: {
                            if (firstStatement == TokenKind.EndOfFile) {
                                firstStatement = tok.Kind;
                            }
                            var line2 = token.Span.Start.GetContainingLine();
                            current = new LineInfo {
                                Indentation = GetIndentation(line2.GetText(), tabSize)
                            };
                            break;
                        }
                    }
                }
            }
            if (token != null && current.ShouldIndentAfter && cancelIndent != null) {
                // Check to see if we have following tokens that would cancel the current indent.
                var tok = Tokens.GetToken(token.Span.GetText());
                var tokenCategory = token.ClassificationType;
                bool allPast = true;
                bool cancel = false;
                foreach (var ci in cancelIndent) {
                    if (ci.TokensAhead < (i - cancelIndentStartingAt)) {
                        continue;
                    } else {
                        allPast = false;
                        if (ci.TokensAhead == (i - cancelIndentStartingAt)) {
                            if (ci.UseCategory && ci.CancelCategory != null) {
                                cancel = tokenCategory == ci.CancelCategory;
                            } else if (tok != null) {
                                cancel = tok.Kind == ci.CancelToken;
                            }
                            if (cancel) {
                                break;
                            }
                        }
                    }
                }
                if (cancel) {
                    current.ShouldIndentAfter = false;
                }
                if (cancel || allPast) {
                    cancelIndent = null;
                    cancelIndentStartingAt = -1;
                    latestIndentChangeToken = TokenKind.EndOfFile;
                }
            }
            if (token != null && ShouldDedentAfterKeyword(token)) {
                // dedent after some statements
                current.ShouldDedentAfter = true;
            }
            TokenKind tempChangeToken;
            if (token != null && indentStack.Count == 0 && firstToken == token &&
                ShouldIndentAfterKeyword(token, out tempChangeToken, out cancelIndent)) {
                // except in a grouping
                if (latestIndentChangeToken != TokenKind.EndKeyword) {
                    current.ShouldIndentAfter = true;
                }
                latestIndentChangeToken = tempChangeToken;
                if (cancelIndent != null) {
                    cancelIndentStartingAt = i;
                }
            }
        }
        // Give registered per-keyword rules a chance to supply a specific indentation.
        if (tokenStack.Count > 2 && tokenStack[tokenStack.Count - 2] != null) {
            if (latestIndentChangeToken != TokenKind.EndOfFile && _customIndentingRules.ContainsKey(latestIndentChangeToken)) {
                var potentialIndent = _customIndentingRules[latestIndentChangeToken](tokenStack, tabSize);
                if (potentialIndent != 0) {
                    return(potentialIndent);
                }
            }
            // see if we have specific alignment rules
            if (firstStatement != TokenKind.EndOfFile && _customIndentingRules.ContainsKey(firstStatement)) {
                var potentialIndent = _customIndentingRules[firstStatement](tokenStack, tabSize);
                if (potentialIndent != 0) {
                    return(potentialIndent);
                }
            }
        }
        // Base indentation plus/minus one level per pending indent/dedent.
        indentation = current.Indentation +
                      (current.ShouldIndentAfter ? tabSize : 0) -
                      (current.ShouldDedentAfter ? tabSize : 0);
    }
    return(indentation);
}
/// <summary>
/// Logs a user in via a Facebook access token: validates the token against the
/// Graph API, creates a local user and customer record on first login, and
/// returns a freshly generated JWT.
/// </summary>
/// <param name="model">Contains the Facebook user access token to validate.</param>
/// <returns>200 with the JWT on success; 400 on an invalid token or account-creation failure.</returns>
public async Task<IActionResult> Facebook([FromBody] FacebookAuthViewModel model)
{
    // 1.generate an app access token
    var appAccessTokenResponse = await Client.GetStringAsync($"https://graph.facebook.com/oauth/access_token?client_id={_fbAuthSettings.AppId}&client_secret={_fbAuthSettings.AppSecret}&grant_type=client_credentials");
    var appAccessToken = JsonConvert.DeserializeObject<FacebookAppAccessToken>(appAccessTokenResponse);
    // 2. validate the user access token
    var userAccessTokenValidationResponse = await Client.GetStringAsync($"https://graph.facebook.com/debug_token?input_token={model.AccessToken}&access_token={appAccessToken.AccessToken}");
    var userAccessTokenValidation = JsonConvert.DeserializeObject<FacebookUserAccessTokenValidation>(userAccessTokenValidationResponse);
    if (!userAccessTokenValidation.Data.IsValid)
    {
        return(BadRequest(Errors.AddErrorToModelState("login_failure", "Invalid facebook token.", ModelState)));
    }
    // 3. we've got a valid token so we can request user data from fb
    var userInfoResponse = await Client.GetStringAsync($"https://graph.facebook.com/v2.8/me?fields=id,email,first_name,last_name,name,gender,locale,birthday,picture&access_token={model.AccessToken}");
    var userInfo = JsonConvert.DeserializeObject<FacebookUserData>(userInfoResponse);
    // 4. ready to create the local user account (if necessary) and jwt
    var user = await _userManager.FindByEmailAsync(userInfo.Email);
    if (user == null)
    {
        var appUser = new AppUser
        {
            FirstName = userInfo.FirstName,
            LastName = userInfo.LastName,
            FacebookId = userInfo.Id,
            Email = userInfo.Email,
            UserName = userInfo.Email,
            PictureUrl = userInfo.Picture.Data.Url
        };
        // Random 8-character password: the account is only ever used via Facebook login.
        var result = await _userManager.CreateAsync(appUser, Convert.ToBase64String(Guid.NewGuid().ToByteArray()).Substring(0, 8));
        if (!result.Succeeded)
        {
            return(new BadRequestObjectResult(Errors.AddErrorsToModelState(result, ModelState)));
        }
        await _appDbContext.Customers.AddAsync(new Customer
        {
            IdentityId = appUser.Id,
            Location = "",
            Locale = userInfo.Locale,
            Gender = userInfo.Gender
        });
        await _appDbContext.SaveChangesAsync();
    }
    // generate the jwt for the local user...
    var localUser = await _userManager.FindByNameAsync(userInfo.Email);
    if (localUser == null)
    {
        return(BadRequest(Errors.AddErrorToModelState("login_failure", "Failed to create local user account.", ModelState)));
    }
    var jwt = await Tokens.GenerateJwt(_jwtFactory.GenerateClaimsIdentity(localUser.UserName, localUser.Id), _jwtFactory, localUser.UserName, _jwtOptions, new JsonSerializerSettings { Formatting = Formatting.Indented });
    return(new OkObjectResult(jwt));
}
protected abstract IToken EvaluateComplexOperation(IToken x, IToken y, Tokens o);
public AssertTokenizer /*!*/ Read(Tokens token) { Next(); Tests.Assert(_actualToken == token); return(this); }
/// <summary> /// <para>Unmute an status.</para> /// <para>Available parameters:</para> /// <para>- <c>long</c> id (required)</para> /// </summary> /// <param name="parameters">The parameters.</param> /// <returns> /// <para>The task object representing the asynchronous operation.</para> /// <para>The Result property on the task object returns the status object.</para> /// </returns> public Task <Status> UnmuteStatusAsync(IDictionary <string, object> parameters) { return(Tokens.AccessParameterReservedApiAsync <Status>(MethodType.Post, "statuses/{id}/unmute", "id", parameters)); }
public CoreTweetWrapper() { // Use OAuth to open an authenticated session and make requests tokens = Tokens.Create(consumerKey, "8GSJ0HNkPaLJ89jqwYMFgRj015gdSBhscQ46xY6grs8FD9PQXm", accessToken, "WrcRG29RNb4U0bvCpW85E2L0jlmRSKWyIbjzPldxfMHEC", screenName: "JamesMSP"); }
public Twitter(string ck, string cs, string at, string ats) { tokens = CoreTweet.Tokens.Create(ck, cs, at, ats); }
/// <summary> /// <para>Returns current account's mutes.</para> /// <para>allowed values of exclude_types: "follow", "favourite", "reblog", "mention"</para> /// <para>Available parameters:</para> /// <para>- <c>long</c> max_id (optional)</para> /// <para>- <c>long</c> since_id (optional)</para> /// <para>- <c>int</c> limit (optional)</para> /// <para>- <c>IEnumerable<string></c> exclude_types (required)</para> /// </summary> /// <param name="parameters">The parameters.</param> /// <returns> /// <para>The task object representing the asynchronous operation.</para> /// <para>The Result property on the task object returns the list of account object.</para> /// </returns> public Task <Linked <Account> > GetAsync(IDictionary <string, object> parameters) { return(Tokens.AccessApiAsync <Linked <Account> >(MethodType.Get, "mutes", parameters)); }
public BlockSyntax ParseBlock() { var openBrace = Tokens.Expect <IOpenBraceToken>(); return(ParseRestOfBlock(openBrace)); }
private static int DefineStatementIndenting(List <ClassificationSpan> tokenList, int defaultTabSize) { if (tokenList[0] == null && tokenList[tokenList.Count - 1] == null) { if (tokenList.Count > 3) { var startTokText = tokenList[1].Span.GetText(); var startTok = Tokens.GetToken(startTokText); if (startTok != null) { var lastTokText = tokenList[tokenList.Count - 2].Span.GetText(); var lastTok = Tokens.GetToken(lastTokText); if (lastTok == null) { lastTok = Tokens.GetSymbolToken(lastTokText); } if (lastTok != null) { switch (lastTok.Kind) { case TokenKind.Comma: { switch (startTok.Kind) { case TokenKind.DefineKeyword: case TokenKind.ConstantKeyword: case TokenKind.TypeKeyword: { int nextIndex = 2; while (tokenList[nextIndex] == null) { nextIndex++; } // grab the next token and get its "indentation" var line = tokenList[nextIndex].Span.Start.GetContainingLine(); return(tokenList[nextIndex].Span.Start - line.Start); } } break; } case TokenKind.RecordKeyword: { int lastIndex = tokenList.Count - 3; while (tokenList[lastIndex] == null) { lastIndex--; } var checkPrevTok = Tokens.GetToken(tokenList[lastIndex].Span.GetText()); if (checkPrevTok == null || checkPrevTok.Kind != TokenKind.EndKeyword) { switch (startTok.Kind) { case TokenKind.DefineKeyword: case TokenKind.TypeKeyword: { // get the line of the last token var line = tokenList[tokenList.Count - 2].Span.Start.GetContainingLine(); return(GetIndentation(line.GetText(), defaultTabSize) + defaultTabSize); } } } break; } } } } } } return(0); }
/// <summary> /// Initializes a new instance of <see cref="Token"/> AST node. /// </summary> /// <param name="tokenType">The <see cref="Tokens">type</see> of the token.</param> /// <param name="text">The string representation of the token.</param> public Token(Tokens tokenType, string text = null) { this.tokenType = tokenType; this.text = text; }
/// <summary> /// <para>Returns a filter.</para> /// <para>Available parameters:</para> /// <para>- <c>long</c> id (required)</para> /// </summary> /// <param name="parameters">The parameters.</param> /// <returns> /// <para>The task object representing the asynchronous operation.</para> /// <para>The Result property on the task object returns the filter object.</para> /// </returns> public Task <Filter> IdAsync(IDictionary <string, object> parameters) { return(Tokens.AccessParameterReservedApiAsync <Filter>(MethodType.Get, "filters/{id}", "id", parameters)); }
/// <summary>
/// Re-indents lines [startLine, endLine] of the text buffer. The base indentation is
/// taken from the nearest preceding non-blank, non-'#'-comment line that starts with a
/// valid statement keyword; indentation is then adjusted per line by block keywords.
/// All changes go through <paramref name="edit"/>; positions are computed against the
/// snapshot, so edits must not be applied until the loop completes.
/// </summary>
/// <param name="textView">View whose buffer is formatted; supplies the tab size.</param>
/// <param name="edit">Edit session that receives the insert/delete operations.</param>
/// <param name="startLine">First line number to format (inclusive).</param>
/// <param name="endLine">Last line number to format (inclusive).</param>
/// <param name="applyEdits">When true, applies the edit session if any change was queued.</param>
/// <param name="indentEmptyLines">When true, whitespace-only lines are re-indented too.</param>
internal static void Format(ITextView textView, ITextEdit edit, int startLine, int endLine, bool applyEdits = true, bool indentEmptyLines = false) {
    int tabSize = textView.Options.GetTabSize();
    int baseIndentation = 0;
    int prevLine = startLine;
    while (prevLine > 0) {
        // need to get the previous line that isn't blank
        var lineText = textView.TextBuffer.CurrentSnapshot.GetLineFromLineNumber(--prevLine).GetText();
        if (!string.IsNullOrWhiteSpace(lineText) && !lineText.StartsWith("#")) {
            var words = lineText.Trim().Split(new[] { ' ' });
            if (words.Length >= 1) {
                var tok = Tokens.GetToken(words[0]);
                // Anchor on the first earlier line that begins a valid statement.
                if (tok != null && Genero4glAst.ValidStatementKeywords.Contains(tok.Kind)) {
                    baseIndentation = GetIndentation(lineText, tabSize);
                    break;
                }
            }
        }
    }
    bool editsMade = false;
    int currIndentation = baseIndentation;
    for (int i = startLine; i <= endLine; i++) {
        var line = textView.TextBuffer.CurrentSnapshot.GetLineFromLineNumber(i);
        var lineStr = line.GetText();
        var lineIndentation = GetIndentation(lineStr, tabSize);
        // preDecrement: outdent this line; postIncrement: indent the lines after it.
        bool postIncrement = false, preDecrement = false;
        var trimmed = lineStr.Trim();
        var words = trimmed.Split(new[] { ' ' });
        if (words.Length >= 1) {
            var tok = Tokens.GetToken(words[0]);
            if (tok != null) {
                // check to see if the current line is a valid "block" start. If it is, the indentation should be incremented
                if (SubBlockKeywords.ContainsKey(tok.Kind)) {
                    // Sub-blocks (e.g. ELSE-like keywords — TODO confirm) sit at the outer
                    // level themselves but indent what follows.
                    if (i == startLine) {
                        postIncrement = true;
                    } else {
                        preDecrement = postIncrement = true;
                    }
                } else if (BlockKeywords.ContainsKey(tok.Kind)) {
                    // increase indent on next line
                    postIncrement = true;
                } else if (tok.Kind == TokenKind.EndKeyword) {
                    // END closes a block: outdent it, unless it is the first formatted line.
                    if (i != startLine) {
                        preDecrement = true;
                    }
                }
            }
        }
        if (preDecrement) {
            currIndentation -= tabSize;
        }
        // apply current indentation
        if (lineIndentation != currIndentation && (indentEmptyLines || !string.IsNullOrWhiteSpace(lineStr))) {
            int diff = Math.Abs(lineIndentation - currIndentation);
            // TODO: need to handle tabs in the string
            if (lineIndentation < currIndentation) {
                StringBuilder sb = new StringBuilder();
                // add spaces
                for (int j = 0; j < diff; j++) {
                    sb.Append(' ');
                }
                edit.Insert(line.Start, sb.ToString());
            } else {
                // remove spaces
                // NOTE(review): deletes 'diff' characters from line start — assumes the
                // leading whitespace is spaces, not tabs (see TODO above).
                edit.Delete(line.Start, diff);
            }
            editsMade = true;
        }
        if (postIncrement) {
            currIndentation += tabSize;
        }
    }
    if (editsMade && applyEdits) {
        edit.Apply();
    }
}
public Tokens GetNextToken() { int current_state = yy_state_dtrans[(int)current_lexical_state]; int last_accept_state = NoState; bool is_initial_state = true; MarkTokenChunkStart(); token_start = token_chunk_start; expanding_token = false; AdvanceEndPosition((token_end > 0) ? token_end - 1 : 0, token_start); // capture token start position: token_start_pos.Char = token_end_pos.Char; if (acceptCondition[current_state] != AcceptConditions.NotAccept) { last_accept_state = current_state; MarkTokenEnd(); } while (true) { char lookahead = (is_initial_state && yy_at_bol) ? BOL : Advance(); int next_state = nextState[rowMap[current_state], colMap[lookahead]]; if (lookahead == EOF && is_initial_state) { return Tokens.EOF; } if (next_state != -1) { current_state = next_state; is_initial_state = false; if (acceptCondition[current_state] != AcceptConditions.NotAccept) { last_accept_state = current_state; MarkTokenEnd(); } } else { if (last_accept_state == NoState) { return Tokens.ERROR; } else { if ((acceptCondition[last_accept_state] & AcceptConditions.AcceptOnEnd) != 0) TrimTokenEnd(); MoveToTokenEnd(); if (last_accept_state < 0) { System.Diagnostics.Debug.Assert(last_accept_state >= 902); } else { bool accepted = false; yyreturn = Accept0(last_accept_state, out accepted); if (accepted) { AdvanceEndPosition(token_start, token_end - 1); return yyreturn; } } // token ignored: is_initial_state = true; current_state = yy_state_dtrans[(int)current_lexical_state]; last_accept_state = NoState; MarkTokenChunkStart(); if (acceptCondition[current_state] != AcceptConditions.NotAccept) { last_accept_state = current_state; MarkTokenEnd(); } } } } } // end of GetNextToken
/// <summary> /// <para>Deletes a filter.</para> /// <para>Available parameters:</para> /// <para>- <c>long</c> id (required)</para> /// </summary> /// <param name="parameters">The parameters.</param> /// <returns> /// <para>The task object representing the asynchronous operation.</para> /// <para>The Result property on the task object returns the empty object.</para> /// </returns> public Task DeleteAsync(IDictionary <string, object> parameters) { return(Tokens.AccessParameterReservedApiAsync(MethodType.Delete, "filters/{id}", "id", parameters)); }