public ImageRequest(String url, IListener listener, int maxWidth, int maxHeight,
    Android.Widget.ImageView.ScaleType scaleType, Android.Graphics.Bitmap.Config decodeConfig,
    IErrorListener errorListener)
    : base(Method.GET, url, errorListener)
{
    SetRetryPolicy(new DefaultRetryPolicy(IMAGE_TIMEOUT_MS, IMAGE_MAX_RETRIES, IMAGE_BACKOFF_MULT));
    mListener = listener;
    mDecodeConfig = decodeConfig;
    mMaxWidth = maxWidth;
    mMaxHeight = maxHeight;
    mScaleType = scaleType;
}
public static void SetListener(IErrorListener listener) { _listener = listener; }
public ImageRequest(String url, IListener listener, int maxWidth, int maxHeight,
    Android.Graphics.Bitmap.Config decodeConfig, IErrorListener errorListener)
    : this(url, listener, maxWidth, maxHeight, Android.Widget.ImageView.ScaleType.CenterInside, decodeConfig, errorListener)
{
}
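For context, a minimal sketch of an error listener that could be passed into the constructors above; the OnErrorResponse member name and the VolleyError type are assumptions about this Volley port, not something shown in the snippet itself.

// Sketch only: assumes the port mirrors Volley's Response.ErrorListener contract.
class LoggingErrorListener : IErrorListener
{
    public void OnErrorResponse(VolleyError error)
    {
        // Log and swallow the failure so a broken image load does not crash the UI.
        Android.Util.Log.Warn("ImageRequest", error?.Message ?? "image request failed");
    }
}
// Example wiring (listener and decode config names are placeholders):
// var request = new ImageRequest(url, imageListener, 0, 0, Bitmap.Config.Rgb565, new LoggingErrorListener());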
private static void CheckInstanceParent(IEnumerable<Type> validParentTypes, IContained obj, IErrorListener<TraverserException> errLis)
{
    var valid = false;
    obj.Parent.GetType().ForTypeBaseTypesAndInterfaces(t => valid |= validParentTypes.Contains(t));
    if (valid)
    {
        return;
    }

    var parentsText = validParentTypes.Select(t => $"'{Names.OfType(t).ToLower()}s'").DelimiteredConcat(", ", " or ");
    var message = $"A '{obj.ObjectName.ToLower()}' has a '{obj.Parent.ObjectName.ToLower()}' for a parent. " +
                  $"'{obj.ObjectName.ToUpperFirstLowerRest()}s' are expected to have {parentsText} as parents.";
    errLis.Error(new TraverserException(obj, message));
}
/// <summary>
/// Creates the list of EPL commands parsed from the given EPL code.
/// </summary>
/// <param name="epl">EPL code</param>
/// <param name="listener">Error listener</param>
/// <returns>List of parsed commands</returns>
public static IList<EPLCommand> Parse(string epl, IErrorListener listener)
{
    if (epl == null)
    {
        throw new ArgumentNullException("epl");
    }
    if (!epl.EndsWith(Environment.NewLine))
    {
        epl += Environment.NewLine;
    }

    IList<EPLCommand> commands = new List<EPLCommand>();

    // Get list of commands, sorted by length descending
    string[] commandList = EPLCommandHelper.CommandDefinition.Select(item => item.Key).OrderByDescending(item => item.Length).ToArray();

    var eplTokenizer = new Lexer.Tokenizer(new Lexer.ParserConfiguration(), listener);
    eplTokenizer.Parse(epl);
    var tokens = eplTokenizer.Tokens;

    RawToken lastToken = null;
    RawToken token = null;
    EPLCommand lastCommand = null;
    int lineNumber = 1;
    int paramCounter = 0;
    ParserState state = ParserState.ExpectNewCommand;
    int _qCommandExists = 0;
    int _RCommandExists = 0;

    while (tokens.Count > 0)
    {
        lastToken = token;
        token = tokens.PopFirst();

        // Comment line (";"): skip to the next line
        if (token.Content.StartsWith(";"))
        {
            lineNumber++;
            state = ParserState.ExpectNewCommand;
        }
        // Parameter enclosed in braces
        else if (token.Content == "{" && state == ParserState.ExpectParameter)
        {
            StringBuilder sb = new StringBuilder();
            sb.Append("{");
            while (tokens.Count > 0)
            {
                token = tokens.PopFirst();
                if (token.Content == "}")
                {
                    sb.Append(token.Content);
                    break;
                }
                else if (token.Content == Environment.NewLine)
                {
                    listener.Error("Expected token }", lineNumber, token.Index.Item1, token.Index.Item2);
                }
                else
                {
                    sb.Append(token.Content);
                }
            }

            // Set as parameter of the current command
            if (lastCommand != null)
            {
                string[] newParamArr = new string[paramCounter + 1];
                newParamArr[paramCounter] = sb.ToString();
                if (lastCommand.Parameter != null)
                {
                    for (int i = 0; i < paramCounter; i++)
                    {
                        newParamArr[i] = lastCommand.Parameter[i];
                    }
                }
                // Set token content
                lastCommand.Parameter = newParamArr;
                paramCounter++;
                state = ParserState.ExpectNewCommand | ParserState.ExpectParamSeperator;
            }
            state = ParserState.ExpectParamSeperator | ParserState.ExpectNewCommand;
        }
        // Parse command data
        else if (token.Content.StartsWith("\"") && token.Content.EndsWith("\"") && state == ParserState.ExpectParameter && token.Content.Length > 1)
        {
            if (lastCommand != null)
            {
                lastCommand.Data = token.Content.Remove(0, 1);
                lastCommand.Data = lastCommand.Data.Remove(lastCommand.Data.Length - 1, 1);
            }
            state = ParserState.ExpectParamSeperator | ParserState.ExpectNewCommand;
        }
        // Script block
        else if (token.Content == "@{" && state == ParserState.ExpectNewCommand)
        {
            StringBuilder sb = new StringBuilder();
            while (tokens.Count > 0)
            {
                token = tokens.PopFirst();
                if (token.Content == "}@")
                {
                    break;
                }
                else
                {
                    if (!sb.ToString().EndsWith(Environment.NewLine))
                    {
                        sb.Append(" ");
                    }
                    sb.Append(token.Content);
                }
            }
            commands.Add(new ScriptCommand() { Data = sb.ToString() });
        }
        // Parameter separator
        else if (token.Content == "," && (state & ParserState.ExpectParamSeperator) == ParserState.ExpectParamSeperator)
        {
            state = ParserState.ExpectParameter;
        }
        // End of line
        else if (token.Content == Environment.NewLine && (state == ParserState.ExpectNewCommand || (state & ParserState.ExpectParamSeperator) == ParserState.ExpectParamSeperator))
        {
            state = ParserState.ExpectNewCommand;
            lineNumber++;
        }
        // Start of a new command
        else if (IsCommand(commandList, token.Content) && (state & ParserState.ExpectNewCommand) == ParserState.ExpectNewCommand || (state & ParserState.ExpectParamSeperator) == ParserState.ExpectParamSeperator)
        {
            // go through the list of commands
            lastCommand = null;
            paramCounter = 0;
            foreach (string cmd in commandList)
            {
                if (token.Content.StartsWith(cmd))
                {
                    lastCommand = EPLCommandHelper.GetInstance(cmd);
                    lastCommand.Token = token;
                    if (lastCommand is UnkownCommand)
                    {
                        (lastCommand as UnkownCommand).CommandName = cmd;
                    }
                    else if (lastCommand is LabelWidthCommand)
                    {
                        _qCommandExists = lineNumber;
                    }
                    else if (lastCommand is RCommand)
                    {
                        _RCommandExists = lineNumber;
                    }

                    string newTokenContent = token.Content.Remove(0, cmd.Length);
                    if (newTokenContent == "" && (tokens.Count == 0 || tokens.PeekFirst().Content == Environment.NewLine))
                    {
                        state = ParserState.ExpectNewCommand;
                    }
                    else
                    {
                        state = ParserState.ExpectParameter;
                        if (!string.IsNullOrWhiteSpace(newTokenContent))
                        {
                            // Add first parameter value again
                            tokens.PushFront(new Lexer.RawToken(newTokenContent, new Tuple<int, int>(0, 0), new Tuple<int, int>(0, 0)));
                        }
                    }
                    break;
                }
            }
            if (lastCommand != null)
            {
                commands.Add(lastCommand);
            }
        }
        // Plain parameter value
        else if (state == ParserState.ExpectParameter && token.Content != Environment.NewLine)
        {
            if (lastCommand != null)
            {
                string[] newParamArr = new string[paramCounter + 1];
                newParamArr[paramCounter] = token.Content;
                if (lastCommand.Parameter != null)
                {
                    for (int i = 0; i < paramCounter; i++)
                    {
                        newParamArr[i] = lastCommand.Parameter[i];
                    }
                }
                // Set token content
                lastCommand.Parameter = newParamArr;
                paramCounter++;
                state = ParserState.ExpectNewCommand | ParserState.ExpectParamSeperator;
            }
        }
        else
        {
            listener.Error("Unexpected token near: " + (lastToken ?? token).Content, lineNumber, 0, 0);
        }
    }

    if (_qCommandExists > 0 && _RCommandExists > 0)
    {
        if (_qCommandExists < _RCommandExists)
        {
            listener.Error("If the R-Command is sent after the q-command, the image buffer will be reformatted to printer width.", _RCommandExists, 0, 0);
        }
        else
        {
            listener.Error("The R-command forces the printer to use the full width of the print head as the width of the label/image buffer.", _qCommandExists, 0, 0);
        }
    }
    return commands;
}
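A hedged usage sketch for Parse above: the Error(message, line, start, end) signature is taken from the calls inside the method, while the containing class name and any additional interface members are assumptions.

// Sketch only: assumes IErrorListener exposes just the Error overload used by Parse above.
class ConsoleEplErrorListener : IErrorListener
{
    public void Error(string message, int line, int start, int end)
        => Console.WriteLine($"EPL error at line {line} ({start}-{end}): {message}");
}

// var commands = Parse(File.ReadAllText("label.epl"), new ConsoleEplErrorListener());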
public static bool Parse(string sourceText, string fileName, CSharpParseOptions options, IErrorListener listener,
    out ITokenStream tokens, out XSharpParser.SourceContext tree)
{
    tree = null;
    tokens = null;
    var parseErrors = ParseErrorData.NewBag();
    try
    {
        var lexer = XSharpLexer.Create(sourceText, fileName, options);
        lexer.Options = options;
        BufferedTokenStream tokenStream = lexer.GetTokenStream();
        tokenStream.Fill();
        tokens = (ITokenStream)tokenStream;
        GetLexerErrors(lexer, tokenStream, parseErrors);

        // do we need to preprocess
        #region Determine if we really need the preprocessor
        bool mustPreprocess = true;
        if (lexer.HasPreprocessorTokens || !options.NoStdDef)
        {
            // no need to pre process in partial compilation
            // if lexer does not contain UDCs, Messages or Includes
            mustPreprocess = lexer.MustBeProcessed;
        }
        else
        {
            mustPreprocess = false;
        }
        #endregion

        XSharpPreprocessor pp = null;
        BufferedTokenStream ppStream = null;
        pp = new XSharpPreprocessor(lexer, tokenStream, options, fileName, Encoding.Unicode, SourceHashAlgorithm.None, parseErrors);
        if (mustPreprocess)
        {
            var ppTokens = pp.PreProcess();
            ppStream = new CommonTokenStream(new XSharpListTokenSource(lexer, ppTokens));
        }
        else
        {
            // No Standard Defs and no preprocessor tokens in the lexer
            // so we bypass the preprocessor and use the lexer token stream
            ppStream = new CommonTokenStream(new XSharpListTokenSource(lexer, tokenStream.GetTokens()));
        }
        ppStream.Fill();

        var parser = new XSharpParser(ppStream);
        parser.Interpreter.tail_call_preserves_sll = false; // default = true. Setting to FALSE will reduce memory used by parser
        parser.Options = options;
        tree = null;
        parser.RemoveErrorListeners();
        parser.Interpreter.PredictionMode = PredictionMode.Sll;
        parser.ErrorHandler = new BailErrorStrategy();
        try
        {
            tree = parser.source();
        }
        catch (Exception)
        {
            var errorListener = new XSharpErrorListener(fileName, parseErrors);
            parser.AddErrorListener(errorListener);
            parser.ErrorHandler = new XSharpErrorStrategy();
            parser.Interpreter.PredictionMode = PredictionMode.Ll;
            ppStream.Reset();
            parser.Reset();
            try
            {
                tree = parser.source();
            }
            catch (Exception)
            {
                tree = null;
            }
        }
    }
    catch (Exception)
    {
        tree = null;
    }
    ReportErrors(parseErrors, listener);
    return tree != null;
}
/// <inheritdoc />
public ISegment ResolveFieldData(IErrorListener listener, IMetadata metadata, in FieldRvaRow fieldRvaRow)
/// <summary>
/// Registers an error, and returns a default value for the provided type.
/// </summary>
/// <param name="self">The error listener.</param>
/// <param name="exception">The error.</param>
/// <typeparam name="T">The type of value to return.</typeparam>
public static T RegisterExceptionAndReturnDefault<T>(this IErrorListener self, Exception exception)
{
    self.RegisterException(exception);
    return default;
}
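A sketch of the call pattern this helper enables: report the problem and bail out of a reader method in a single expression. The surrounding method and its message are hypothetical.

// Hypothetical reader method: register the error and return default(TypeSignature) in one statement.
TypeSignature ReadFieldSignature(IErrorListener listener, byte[] blob)
{
    if (blob == null || blob.Length == 0)
        return listener.RegisterExceptionAndReturnDefault<TypeSignature>(
            new BadImageFormatException("Field signature is missing or empty."));

    /* ... decode the signature here ... */
    return null; // placeholder for the decoded signature
}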
public static bool Lex(string sourceText, string fileName, CSharpParseOptions options, IErrorListener listener,
    out ITokenStream tokens)
{
    tokens = null;
    var parseErrors = ParseErrorData.NewBag();
    try
    {
        var lexer = XSharpLexer.Create(sourceText, fileName, options);
        lexer.Options = options;
        var tokenStream = lexer.GetTokenStream();
        tokenStream.Fill();
        tokens = tokenStream;
        GetLexerErrors(lexer, tokenStream, parseErrors);

        #region Determine if we need to preprocess
        bool mustPreprocess = true;
        if (options.NoStdDef)
        {
            mustPreprocess = lexer.MustBeProcessed || lexer.HasPreprocessorTokens;
        }
        #endregion

        XSharpPreprocessor pp = null;
        BufferedTokenStream ppStream = null;
        pp = new XSharpPreprocessor(lexer, tokenStream, options, fileName, Encoding.Unicode, SourceHashAlgorithm.None, parseErrors);
        if (mustPreprocess)
        {
            var ppTokens = pp.PreProcess();
            ppStream = new CommonTokenStream(new XSharpListTokenSource(lexer, ppTokens));
        }
        else
        {
            // No Standard Defs and no preprocessor tokens in the lexer
            // so we bypass the preprocessor and use the lexer token stream
            ppStream = new CommonTokenStream(new XSharpListTokenSource(lexer, tokenStream.GetTokens()));
        }
        ppStream.Fill();
    }
    catch (Exception)
    {
    }
    ReportErrors(parseErrors, listener);
    return tokens != null;
}
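A hedged sketch of driving Lex and dumping the tokens it produced; the source path, options, and listener instance are assumed to be supplied by the caller, and Size/Get are the standard ANTLR ITokenStream members.

// Sketch only: sourcePath, parseOptions and hostErrorListener are assumed to be provided by the host.
if (Lex(File.ReadAllText(sourcePath), sourcePath, parseOptions, hostErrorListener, out ITokenStream lexedTokens))
{
    for (int i = 0; i < lexedTokens.Size; i++)
        Console.WriteLine(lexedTokens.Get(i)); // prints each token's default string representation
}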
/// <summary> /// Registers an instance of a <see cref="BadImageFormatException"/> class. /// </summary> /// <param name="self">The error listener.</param> /// <param name="message">The message of the error.</param> public static void BadImage(this IErrorListener self, string message) { self.RegisterException(new BadImageFormatException(message)); }
/// <summary> /// Registers an instance of a <see cref="NotSupportedException"/> class. /// </summary> /// <param name="self">The error listener.</param> /// <param name="message">The message of the error.</param> public static void NotSupported(this IErrorListener self, string message) { self.RegisterException(new NotSupportedException(message)); }
public static void AddListener(IErrorListener listener) { _listeners.Add(listener); }
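A sketch of how such a registry might be drained, assuming the registered listeners expose RegisterException as in the extension methods above; the Broadcast helper itself is hypothetical.

// Hypothetical companion to AddListener: fan a single exception out to every registered listener.
public static void Broadcast(Exception exception)
{
    foreach (var listener in _listeners)
        listener.RegisterException(exception);
}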
public static void MetadataBuilder(this IErrorListener listener, string message) { listener.RegisterException(new MetadataBuilderException(message)); }
/// <summary>
/// Initializes the module read parameters with an error listener.
/// </summary>
/// <param name="errorListener">The object responsible for recording parser errors.</param>
public ModuleReaderParameters(IErrorListener errorListener)
    : this(null, errorListener)
{
}
/// <summary> /// Creates a new instance of the <see cref="BlobSerializationContext"/> class. /// </summary> /// <param name="writer">The output stream to write the raw data to.</param> /// <param name="indexProvider">The object responsible for obtaining coded indices to types.</param> /// <param name="errorListener">The object responsible for collecting diagnostic information during the serialization process.</param> public BlobSerializationContext(IBinaryStreamWriter writer, ITypeCodedIndexProvider indexProvider, IErrorListener errorListener) { Writer = writer ?? throw new ArgumentNullException(nameof(writer)); IndexProvider = indexProvider ?? throw new ArgumentNullException(nameof(indexProvider)); ErrorListener = errorListener ?? throw new ArgumentNullException(nameof(errorListener)); }