/// <summary>
/// Creates a compilation unit for one source file from its using directives,
/// declarations and diagnostics.
/// </summary>
public CompilationUnitSyntax(ISourceText sourceFile, IEnumerable<UsingSyntax> usingDirectives, IEnumerable<DeclarationSyntax> declarations, IEnumerable<Diagnostic> diagnostics)
{
    SourceFile = sourceFile;
    // Snapshot each sequence once so lazy sources are not re-enumerated later.
    UsingDirectives = usingDirectives.ToList();
    Declarations = declarations.ToList();
    Diagnostics = diagnostics.ToList();
}
/// <summary>
/// Creates a location identifying a position inside a source file.
/// </summary>
public Location(ISourceText file, TextPosition position)
{
    Requires.NotNull(file, nameof(file));

    File = file;
    Position = position;
}
/// <summary>
/// Builds a source location from a non-null file and a text position.
/// </summary>
public Location(ISourceText file, TextPosition position)
{
    Requires.NotNull(file, nameof(file));
    File = file;
    Position = position;
}
/// <summary>
/// Initializes the compilation unit, materializing the supplied sequences of
/// using directives, declarations and diagnostics into lists.
/// </summary>
public CompilationUnitSyntax(ISourceText sourceFile, IEnumerable<UsingSyntax> usingDirectives, IEnumerable<DeclarationSyntax> declarations, IEnumerable<Diagnostic> diagnostics)
{
    SourceFile = sourceFile;
    UsingDirectives = usingDirectives.ToList();
    Declarations = declarations.ToList();
    Diagnostics = diagnostics.ToList();
}
/// <summary>
/// Creates a lexer over the supplied source text and primes the token stream.
/// </summary>
public JazonLexer(ISourceText src)
{
    source = src;
    // Caching this builder in TLS was tried and did not change much.
    buffer = new StringBuilder(256);
    result = run().GetEnumerator();
}
/// <summary>
/// Lexes and parses the JSON source, returning the parser's result object.
/// Errors are thrown rather than accumulated (throwErrors: true).
/// </summary>
private static object read(ISourceText source, bool caseSensitiveMaps)
{
    var lexer = new JsonLexer(source, throwErrors: true);
    var parser = new JsonParser(lexer, throwErrors: true, caseSensitiveMaps: caseSensitiveMaps);

    parser.Parse();

    return parser.ResultContext.ResultObject;
}
/// <summary>
/// Deserializes JSON from the supplied source text into an object graph.
/// </summary>
/// <param name="source">The JSON source to read.</param>
/// <param name="caseSensitiveMaps">When true, map keys are matched case-sensitively.</param>
public object DeserializeFromJson(ISourceText source, bool caseSensitiveMaps)
{
    var lexer = new JsonLexer(source, throwErrors: true);
    var parser = new JsonParser(lexer, throwErrors: true, caseSensitiveMaps: caseSensitiveMaps);
    parser.Parse();
    return parser.ResultContext.ResultObject;
}
/// <summary>
/// Runs the JSON lexer/parser pipeline over the source and returns the
/// resulting object; lexing and parsing errors are thrown.
/// </summary>
private static object read(ISourceText source)
{
    var lexer = new JSONLexer(source, throwErrors: true);
    var parser = new JSONParser(lexer, throwErrors: true);

    parser.Parse();

    return parser.ResultContext.ResultObject;
}
/// <summary>
/// Parses Laconfig content from the source into this instance via a
/// LaconfigData context; errors are thrown rather than collected.
/// </summary>
private void read(ISourceText source)
{
    var context = new LaconfigData(this);
    var lexer = new LaconfigLexer(source, throwErrors: true);

    new LaconfigParser(context, lexer, throwErrors: true).Parse();
}
/// <summary>
/// Consumes characters from the stream until whitespace or an invalid
/// operator character is reached, returning the accumulated operator text.
/// </summary>
private string ReadOperator(ISourceText stream)
{
    var text = new StringBuilder(3);

    for (char c = stream.Current; !char.IsWhiteSpace(c) && !_invalidOperatorCharacters.Contains(c); c = stream.Current)
    {
        text.Append(c);
        stream.MoveNext();
    }

    return text.ToString();
}
/// <summary>
/// Creates a diagnostic, validating that the level and phase are defined enum
/// values, the file is non-null and the message is non-empty.
/// </summary>
internal Diagnostic(DiagnosticLevel level, CompilerPhase phase, ISourceText file, TextPosition position, string message)
{
    Requires.EnumDefined(level, nameof(level));
    Requires.EnumDefined(phase, nameof(phase));
    Requires.NotNull(file, nameof(file));
    Requires.NotNullOrEmpty(message, nameof(message));

    Level = level;
    Phase = phase;
    File = file;
    Position = position;
    Message = message;
}
/// <summary>
/// Parses the JSON source into an object graph.
/// </summary>
/// <param name="src">Source text to parse.</param>
/// <param name="senseCase">When true, map keys are treated case-sensitively.</param>
/// <param name="maxDepth">Maximum nesting depth; negative values are clamped to 0 (root literal value).</param>
public static object Parse(ISourceText src, bool senseCase, int maxDepth = 64)
{
    if (maxDepth < 0)
        maxDepth = 0; // 0 = root literal value

    var lexer = new JazonLexer(src);
    fetchPrimary(lexer);
    return doAny(lexer, senseCase, maxDepth);
}
// Parses and type-checks an F# script file with the FSharp Compiler Service.
// On first use it resolves project options from the script (targeting
// netstandard); if option resolution reports errors it returns (null, null).
// Otherwise it parses/checks the file and returns the parse results plus the
// check results when the check answer is Succeeded, or (parseResults, null)
// when it is not (e.g. the check was aborted).
// NOTE(review): the method's modifiers/return type are not visible in this
// excerpt — presumably `async Task<(FSharpParseFileResults, FSharpCheckFileResults)>`; confirm.
// NOTE(review): projectOptions is cached without synchronization — looks
// single-threaded per instance; confirm against callers.
TryParseAndCheckAsync(string path, ISourceText sourceText, CancellationToken token) { if (this.projectOptions == null) { var optionsAsync = _provider.CheckerInstance.GetProjectOptionsFromScript( filename: path, source: sourceText, assumeDotNetFramework: false, useSdkRefs: true, useFsiAuxLib: true, previewEnabled: true, otherFlags: new string[] { "--targetprofile:netstandard" }, loadedTimeStamp: null, extraProjectInfo: null, optionsStamp: null, userOpName: null, sdkDirOverride: null ); var(options, errors) = await FSharpAsync.StartAsTask(optionsAsync, null, token); if (!errors.IsEmpty) { return(null, null); } this.projectOptions = options; } var performParseAndCheck = _provider.CheckerInstance.ParseAndCheckFileInProject( filename: path, fileVersion: 1, sourceText: sourceText, options: projectOptions, userOpName: null ); var(parseResults, checkAnswer) = await FSharpAsync.StartAsTask(performParseAndCheck, null, token); if (checkAnswer is FSharpCheckFileAnswer.Succeeded succeeded) { return(parseResults, succeeded.Item); } return(parseResults, null); }
// Reads a braced identifier of the form "{name}" from the stream, where the
// opening '{' is the current character on entry. The first character after
// '{' must be a letter or '_', subsequent characters letters, digits or '_',
// and the identifier must be terminated by '}'. Returns the full text
// including both braces. Throws LangException.UnexpectedToken on any invalid
// character, and on an empty identifier "{}" (text.Length == 2, reported at
// the terminator's position, stream.Position - 1).
private string ReadIdentifier(ISourceText stream) { const char identifierTerminator = '}'; StringBuilder builder = new StringBuilder("{", 12); stream.MoveNext(); if (char.IsLetter(stream.Current) || stream.Current == '_') { builder.Append(stream.Current); stream.MoveNext(); } else { Token token = new Token(TokenType.Unknown, stream.Current.ToString(), stream.Position); throw LangException.UnexpectedToken(token, TokenType.Identifier); } while (stream.Current != identifierTerminator) { if (char.IsLetterOrDigit(stream.Current) || stream.Current == '_') { builder.Append(stream.Current); stream.MoveNext(); } else { Token token = new Token(TokenType.Unknown, stream.Current.ToString(), stream.Position); throw LangException.UnexpectedToken(token, identifierTerminator); } } builder.Append(stream.Current); stream.MoveNext(); string text = builder.ToString(); if (text.Length == 2) { Token token = new Token(TokenType.Unknown, identifierTerminator.ToString(), stream.Position - 1); throw LangException.UnexpectedToken(token, identifierTerminator); } return(text); }
/// <summary>
/// Writes every diagnostic in the package to the console, emitting an
/// "In {file}" header whenever the file changes between consecutive entries.
/// Line and column are converted from 0-based to 1-based for display.
/// </summary>
private static void PrintDiagnostics(PackageSyntax package)
{
    ISourceText currentFile = null;
    foreach (var diagnostic in package.Diagnostics)
    {
        if (currentFile != diagnostic.File)
        {
            currentFile = diagnostic.File;
            Console.WriteLine($"In {currentFile.Name}");
        }

        var lineNumber = diagnostic.Position.Line + 1;
        var columnNumber = diagnostic.Position.Column + 1;
        Console.WriteLine($"{diagnostic.Level.ToString()} on line {lineNumber} at character {columnNumber}: ");
        Console.WriteLine($"  {diagnostic.Message}");
    }
}
// Reads a numeric literal (digits with at most one '.') from the stream,
// starting at the current character. A second '.' throws UnexpectedToken,
// and a trailing '.' (detected by peeking one character back after the loop)
// also throws — so "1." and "1.2.3" are rejected, while "1", "1.2" and ".5"
// forms accepted by the loop are returned as text.
// NOTE(review): a leading '.' with no digits produces "." and is caught by
// the trailing-dot check; a non-numeric first character yields "" — callers
// presumably guarantee the stream starts on a digit or dot; confirm.
private string ReadNumber(ISourceText stream) { StringBuilder builder = new StringBuilder(8); bool hasDot = false; while (true) { if (stream.Current == '.') { if (!hasDot) { hasDot = true; builder.Append(stream.Current); stream.MoveNext(); } else { Token token = new Token(TokenType.Unknown, stream.Current.ToString(), stream.Position); throw LangException.UnexpectedToken(token, TokenType.Number); } } else if (char.IsDigit(stream.Current)) { builder.Append(stream.Current); stream.MoveNext(); } else { break; } } char peeked = stream.Peek(-1); if (peeked == '.') { Token token = new Token(TokenType.Unknown, peeked.ToString(), stream.Position); throw LangException.UnexpectedToken(token, TokenType.Number); } return(builder.ToString()); }
/// <summary>
/// Renders the diagnostics as a multi-line string, writing an "In {file}"
/// header whenever the file changes between consecutive entries. Line and
/// column are converted from 0-based to 1-based for display.
/// </summary>
private static string ToString(IReadOnlyList <Diagnostic> diagnostics)
{
    var output = new StringBuilder();
    ISourceText currentFile = null;

    foreach (var diagnostic in diagnostics)
    {
        if (currentFile != diagnostic.File)
        {
            currentFile = diagnostic.File;
            output.AppendLine($"In {currentFile.Name}");
        }

        var lineNumber = diagnostic.Position.Line + 1;
        var columnNumber = diagnostic.Position.Column + 1;
        output.AppendLine($"{diagnostic.Level.ToString()} on line {lineNumber} at character {columnNumber}: ");
        output.AppendLine($"  {diagnostic.Message}");
    }

    return output.ToString();
}
// Parses (without type-checking) an F# file using the FSharp Compiler
// Service. Parsing options are built lazily on first call from the framework
// defaults, with only the source-file list overridden; they are then cached
// in parseOpts for subsequent calls. Always returns (parseResults, null).
// NOTE(review): the method's modifiers/return type are not visible in this
// excerpt — presumably `async Task<(FSharpParseFileResults, FSharpCheckFileResults)>`
// to mirror TryParseAndCheckAsync; confirm.
TryParseAsync(string path, ISourceText sourceText, CancellationToken token) { if (parseOpts == null) { var defaults = FSharpParsingOptions.Default; this.parseOpts = new FSharpParsingOptions( sourceFiles: new string[] { path }, conditionalCompilationDefines: defaults.ConditionalCompilationDefines, errorSeverityOptions: defaults.ErrorSeverityOptions, isInteractive: defaults.IsInteractive, lightSyntax: defaults.LightSyntax, compilingFsLib: defaults.CompilingFsLib, isExe: defaults.IsExe ); } var parseAsync = _provider.CheckerInstance.ParseFile(path, sourceText, parseOpts, "FsLint"); var parseResults = await FSharpAsync.StartAsTask(parseAsync, null, token).ConfigureAwait(false); return(parseResults, null); }
/// <summary>
/// Creates a LaconfigLexer configured for this language.
/// </summary>
public override ILexer MakeLexer(IAnalysisContext context, SourceCodeRef srcRef, ISourceText source, MessageList messages = null, bool throwErrors = false)
  => new LaconfigLexer(context, srcRef, source, messages, throwErrors);
/// <summary>
/// Deserializes JSON content into IJSONDataObject
/// </summary>
/// <param name="json">Source text containing JSON.</param>
/// <param name="caseSensitiveMaps">When true, map keys are matched case-sensitively.</param>
public static IJsonDataObject JsonToDataObject(this ISourceText json, bool caseSensitiveMaps = true)
  => JsonReader.DeserializeDataObject(json, caseSensitiveMaps);
/// <summary>
/// Deserializes JSON content into dynamic JSON object
/// </summary>
/// <param name="json">Source text containing JSON.</param>
/// <param name="caseSensitiveMaps">When true, map keys are matched case-sensitively.</param>
public static dynamic JsonToDynamic(this ISourceText json, bool caseSensitiveMaps = true)
  => JsonReader.DeserializeDynamic(json, caseSensitiveMaps);
/// <summary>
/// Initializes the lexer, forwarding context/messages/error mode to the base
/// and retaining the source text to tokenize.
/// </summary>
protected Lexer(IAnalysisContext context, SourceCodeRef srcRef, ISourceText source, MessageList messages = null, bool throwErrors = false)
  : base(context, srcRef, messages, throwErrors)
{
    m_Source = source;
}
/// <summary>
/// Convenience constructor: builds a SourceCodeRef from the source's name
/// (falling back to the unnamed-memory-buffer constant) and no context.
/// </summary>
protected Lexer(ISourceText source, MessageList messages = null, bool throwErrors = false)
  : this(null,
         new SourceCodeRef(source.Name ?? CoreConsts.UNNAMED_MEMORY_BUFFER),
         source,
         messages,
         throwErrors)
{
}
/// <summary>
/// Produces a LaconfigLexer for the supplied source.
/// </summary>
public override ILexer MakeLexer(IAnalysisContext context, SourceCodeRef srcRef, ISourceText source, MessageList messages = null, bool throwErrors = false)
  => new LaconfigLexer(context, srcRef, source, messages, throwErrors);
/// <summary>
/// Orders source texts by name, case-insensitively, using invariant-culture
/// comparison rules.
/// </summary>
/// <param name="other">The source text to compare against; null sorts before any instance.</param>
/// <returns>Negative, zero or positive per the usual CompareTo contract.</returns>
public int CompareTo(ISourceText other)
{
    // Per the IComparable<T> contract, any instance compares greater than null;
    // previously this dereferenced other.Name and threw NullReferenceException.
    if (other is null) return 1;
    return string.Compare(Name, other.Name, StringComparison.InvariantCultureIgnoreCase);
}
/// <summary>
/// Deserializes the JSON source into a dynamic object via the reader backend.
/// </summary>
public static dynamic DeserializeDynamic(ISourceText source, bool caseSensitiveMaps = true)
{
    var parsed = ReaderBackend.DeserializeFromJson(source, caseSensitiveMaps);
    return deserializeDynamic(parsed);
}
/// <summary>
/// Creates a C# lexer over the source, initializing its state machine.
/// </summary>
public CSLexer(ISourceText source, MessageList messages = null, bool throwErrors = false)
  : base(source, messages, throwErrors)
{
    m_FSM = new FSM(this);
}
/// <summary>
/// Deserializes JSON content into IJSONDataObject
/// </summary>
public static IJSONDataObject JSONToDataObject(this ISourceText json)
  => JSONReader.DeserializeDataObject(json);
/// <summary>
/// Creates a builder for one compilation unit, retaining the source file and
/// the error sequence as supplied.
/// </summary>
public CompilationUnitBuilder(ISourceText sourceFile, IEnumerable <Diagnostic> errors)
{
    // NOTE(review): the error sequence is stored without materialization —
    // presumably callers pass an already-built collection; confirm.
    this.sourceFile = sourceFile;
    this.errors = errors;
}
/// <summary>
/// Makes lexer capable of this language analysis.
/// </summary>
/// <param name="context">Analysis context supplied to the lexer.</param>
/// <param name="srcRef">Reference describing where the source text originates.</param>
/// <param name="source">The source text to be tokenized.</param>
/// <param name="messages">Optional list passed to the lexer for messages.</param>
/// <param name="throwErrors">When true, the lexer is constructed in throw-on-error mode.</param>
/// <returns>A language-specific <see cref="ILexer"/> instance.</returns>
public abstract ILexer MakeLexer(IAnalysisContext context, SourceCodeRef srcRef, ISourceText source, MessageList messages = null, bool throwErrors = false);
/// <summary>
/// Wraps a raw text buffer, capturing its pointer and length. A null
/// provider asserts in debug builds and leaves the instance uninitialized
/// instead of throwing.
/// </summary>
public unsafe DocumentText(ISourceTextBuffer textProvider)
{
    if (textProvider == null)
    {
        Debug.Assert(false);
        return; // deliberately tolerant: no exception, empty instance
    }

    this.TextProvider = textProvider;
    this.AsciiStringPtr = textProvider.Buffer;
    this.Length = textProvider.Length;
}
/// <summary>
/// Wraps a source-text provider, capturing its length. A null provider
/// asserts in debug builds and leaves the instance uninitialized instead of
/// throwing.
/// </summary>
public DocumentText(ISourceText textProvider)
{
    if (textProvider == null)
    {
        Debug.Assert(false);
        return; // deliberately tolerant: no exception, empty instance
    }

    this.TextProvider = textProvider;
    this.Length = textProvider.Length;
}
/// <summary>
/// Not implemented for this language; always throws, naming the concrete
/// type in the exception message.
/// </summary>
public override ILexer MakeLexer(IAnalysisContext context, SourceCodeRef srcRef, ISourceText source, MessageList messages = null, bool throwErrors = false)
{
    throw new NotImplementedException(GetType().Name + ".MakeLexer()");
}
/// <summary>
/// Deserializes JSON content into dynamic JSON object
/// </summary>
public static dynamic JSONToDynamic(this ISourceText json)
  => JSONReader.DeserializeDynamic(json);
/// <summary>
/// Reads the JSON source and converts the parsed result into an
/// IJSONDataObject.
/// </summary>
public static IJSONDataObject DeserializeDataObject(ISourceText source)
  => deserializeObject(read(source));
/// <summary>
/// Orders source texts by name, case-insensitively, using invariant-culture
/// comparison rules.
/// </summary>
/// <param name="other">The source text to compare against; null sorts before any instance.</param>
/// <returns>Negative, zero or positive per the usual CompareTo contract.</returns>
public int CompareTo(ISourceText other)
{
    // Per the IComparable<T> contract, any instance compares greater than null;
    // previously this dereferenced other.Name and threw NullReferenceException.
    if (other is null) return 1;
    return string.Compare(Name, other.Name, StringComparison.InvariantCultureIgnoreCase);
}
/// <summary>
/// Records a compilation error raised during the binding phase at the given
/// file position.
/// </summary>
public void AddBindingError(ISourceText file, TextPosition position, string message)
{
    var diagnostic = new Diagnostic(DiagnosticLevel.CompilationError, CompilerPhase.Binding, file, position, message);
    diagnostics.Add(diagnostic);
}
/// <summary>
/// Creates a C# lexer with an explicit context and source reference,
/// initializing its state machine.
/// </summary>
public CSLexer(IAnalysisContext context, SourceCodeRef srcRef, ISourceText source, MessageList messages = null, bool throwErrors = false)
  : base(context, srcRef, source, messages, throwErrors)
{
    m_FSM = new FSM(this);
}
/// <summary>
/// Reads Laconfig content from the source into this instance; lexing and
/// parsing errors are thrown.
/// </summary>
private void read(ISourceText source)
{
    var context = new LaconfigData(this);
    var lexer = new LaconfigLexer(source, throwErrors: true);
    var parser = new LaconfigParser(context, lexer, throwErrors: true);

    parser.Parse();
}
/// <summary>
/// Deserializes the JSON source into an IJsonDataObject via the reader
/// backend.
/// </summary>
public static IJsonDataObject DeserializeDataObject(ISourceText source, bool caseSensitiveMaps = true)
{
    var parsed = ReaderBackend.DeserializeFromJson(source, caseSensitiveMaps);
    return deserializeObject(parsed);
}
/// <summary>
/// The unspecified language has no lexer; always throws NotSupportedException.
/// </summary>
public override ILexer MakeLexer(IAnalysisContext context, SourceCodeRef srcRef, ISourceText source, MessageList messages = null, bool throwErrors = false)
{
    throw new NotSupportedException("UnspecifiedLanguage.MakeLexer()");
}