/// <summary>
/// Generates the final script for the given document. The document has been generated by the build process and
/// contains the entire script in syntax tree form.
/// </summary>
/// <param name="composition">The program composition whose document should be turned into a script.</param>
/// <param name="config">The project script configuration.</param>
/// <returns>The generated script text.</returns>
public override async Task<string> GenerateAsync(ProgramComposition composition, ProjectScriptInfo config)
{
    var analyzer = new DocumentAnalyzer();
    var result = await analyzer.AnalyzeAsync(composition.Document);
    var buffer = new StringBuilder();

    var programContent = string.Join("\n",
        result.Parts.OfType<ProgramScriptPart>().Select(p => p.GenerateContent()));
    buffer.Append(programContent);
    buffer.Append("\n");

    var extensionContent = string.Join("\n",
        result.Parts.OfType<ExtensionScriptPart>().Select(p => p.GenerateContent()));
    if (!string.IsNullOrWhiteSpace(extensionContent))
    {
        // Extension classes are made possible by forcefully ending Space Engineers' wrapping Program class
        // and removing the final ending brace of the last extension class to let Space Engineers close it
        // by itself.

        // Close off the Program class
        buffer.Append("}\n");
        buffer.Append(extensionContent);

        // Remove the ending brace of the last extension class
        var index = FindEndBrace(buffer);
        if (index >= 0)
        {
            buffer.Length = index;
        }
    }

    return TrimPointlessWhitespace(buffer.ToString());
}
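FindEndBrace and TrimPointlessWhitespace are helpers defined elsewhere in this generator and are not shown in the snippet. A minimal sketch of FindEndBrace, assuming it only needs to locate the last closing brace in the buffer, might look like this:

// Hypothetical implementation: scans the buffer backwards and returns the index of the
// last '}' character, or -1 if none is found. The real helper may additionally skip
// trailing whitespace or comments.
static int FindEndBrace(StringBuilder buffer)
{
    for (int i = buffer.Length - 1; i >= 0; i--)
    {
        if (buffer[i] == '}')
        {
            return i;
        }
    }
    return -1;
}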
/*
 * public static PropertyDescriptor GetNamedNonNullStringTypeReference(
 *     string referenceName) =>
 *     new(
 *         referenceName,
 *         new NonNullTypeDescriptor(new NamedTypeDescriptor("string", "global::System.", false)));
 *
 * public static PropertyDescriptor GetNamedNullableStringTypeReference(
 *     string referenceName) =>
 *     new(
 *         referenceName,
 *         new NamedTypeDescriptor("string", "System", false));
 *
 * public static PropertyDescriptor GetNamedNonNullIntTypeReference(
 *     string referenceName) =>
 *     new(
 *         referenceName,
 *         new NonNullTypeDescriptor(new NamedTypeDescriptor("int", "System", false)));
 */
public static async Task<ClientModel> CreateClientModelAsync(params string[] sourceText)
{
    ISchema schema = await new ServiceCollection()
        .AddStarWarsRepositories()
        .AddGraphQL()
        .AddStarWars()
        .BuildSchemaAsync();

    var documents = sourceText
        .Select(source => ("", Utf8GraphQLParser.Parse(source)))
        .ToList();

    var typeSystemDocs = documents.GetTypeSystemDocuments().ToList();
    typeSystemDocs.Add(("", schema.ToDocument()));

    var executableDocs = documents.GetExecutableDocuments().ToList();

    var analyzer = new DocumentAnalyzer();
    analyzer.SetSchema(SchemaHelper.Load(typeSystemDocs));

    foreach (DocumentNode executable in executableDocs.Select(doc => doc.document))
    {
        analyzer.AddDocument(executable);
    }

    return analyzer.Analyze();
}
public static void UpdateAndWait(this DocumentAnalyzer analyzer, TextViewLayoutChangedEventArgs changes)
{
    IndentGuidePackage.JoinableTaskFactory.Run(async delegate
    {
        await analyzer.UpdateAsync(changes).ConfigureAwait(true);
    });
}
public static void ResetAndWait(this DocumentAnalyzer analyzer)
{
    IndentGuidePackage.JoinableTaskFactory.Run(async delegate
    {
        await analyzer.ResetAsync().ConfigureAwait(true);
    });
}
public void PASS_Serialize()
{
    DocumentAnalyzer analyzer = new DocumentAnalyzer("analyzer_path");

    string json = JsonConvert.SerializeObject(analyzer);
    Assert.IsNotNull(json);

    string expectedJson = "{\"path\":\"analyzer_path\"}";
    Assert.AreEqual(expectedJson, json);
}
public void Operation_With_MultipleOperations()
{
    // arrange
    ClientModel clientModel = new DocumentAnalyzer()
        .SetSchema(
            SchemaHelper.Load(
                ("", Utf8GraphQLParser.Parse(@"
                    schema {
                        query: Query
                    }

                    type Query {
                        foo(single: Bar!, list: [Bar!]!, nestedList: [[Bar]]): String
                    }

                    input Bar {
                        str: String
                        strNonNullable: String!
                        nested: Bar
                        nestedList: [Bar!]!
                        nestedMatrix: [[Bar]]
                    }"))))
        .AddDocument(
            Utf8GraphQLParser.Parse(@"
                query TestOperation($single: Bar!, $list: [Bar!]!, $nestedList: [[Bar!]]) {
                    foo(single: $single, list: $list, nestedList: $nestedList)
                }"))
        .AddDocument(
            Utf8GraphQLParser.Parse(@"
                query TestOperation2($single: Bar!, $list: [Bar!]!, $nestedList: [[Bar!]]) {
                    foo(single: $single, list: $list, nestedList: $nestedList)
                }"))
        .AddDocument(
            Utf8GraphQLParser.Parse(@"
                query TestOperation3($single: Bar!, $list: [Bar!]!, $nestedList: [[Bar!]]) {
                    foo(single: $single, list: $list, nestedList: $nestedList)
                }"))
        .AddDocument(Utf8GraphQLParser.Parse("extend schema @key(fields: \"id\")"))
        .Analyze();

    // act
    var documents = new StringBuilder();
    var generator = new CSharpGeneratorExecutor();

    // assert
    AssertResult(clientModel, generator, documents);
}
public LineTextPreview()
{
    SetStyle(ControlStyles.OptimizedDoubleBuffer | ControlStyles.ResizeRedraw | ControlStyles.Opaque, true);
    InitializeComponent();

    _IndentSize = 4;
    _Theme = null;
    Analysis = null;
}
public void DocumentAnalyzer_ShouldGuessPossiblyStyle()
{
    using (var doc = DocX.Load("./docx/headers_and_lists.docx"))
    {
        var analyzer = new DocumentAnalyzer(doc);
        analyzer.Analyze();

        var result = analyzer.Result.Values;
        Assert.Equal(5, result.Count());
        Assert.Equal("Header1", result.ElementAt(0));
        Assert.Equal("Header2", result.ElementAt(1));
        Assert.Equal("P", result.ElementAt(2));
    }
}
public void DocumentAnalyzer_ShouldAnalyzeDocument()
{
    using (var doc = DocX.Load("./docx/headers_and_lists.docx"))
    {
        var analyzer = new DocumentAnalyzer(doc);
        analyzer.Analyze();

        var result = analyzer.Result.Keys;
        Assert.Equal(5, result.Count());
        Assert.Equal("Nagwek001", result.ElementAt(0));
        Assert.True(result.ElementAt(3).StartsWith("ordered_"));
        Assert.True(result.ElementAt(4).StartsWith("unordered_"));
    }
}
protected override void Analyze(AnalysisServices services, RepoFile file)
{
    ProjectAnalyzer.DocumentAnalysisTasks.Add(services.TaskDispatcher.Invoke(async () =>
    {
        try
        {
            ReportStartAnalyze(file);

            var project = ProjectAnalyzer.Project;
            if (project == null)
            {
                file.PrimaryProject.Repo.AnalysisServices.Logger.LogError("Project is null");
                return null;
            }

            var document = project.GetDocument(DocumentInfo.Id);
            var text = await document.GetTextAsync();

            SourceFile sourceFile = new SourceFile()
            {
                Info = AugmentSourceFileInfo(new SourceFileInfo()
                {
                    Language = project.Language,
                    Path = file.LogicalPath,
                    RepoRelativePath = file.RepoRelativePath
                }),
            };

            BoundSourceFileBuilder binder = CreateBuilder(sourceFile, file, file.PrimaryProject.ProjectId);
            binder.SourceText = text;

            DocumentAnalyzer analyzer = new DocumentAnalyzer(
                ProjectAnalyzer.semanticServices,
                document,
                ProjectAnalyzer.CompilationServices,
                file.LogicalPath,
                ProjectAnalyzer.ProjectContext,
                binder);

            var boundSourceFile = await analyzer.CreateBoundSourceFile();

            ProjectAnalyzer.ProjectContext.ReportDocument(boundSourceFile, file);
            UploadSourceFile(services, file, boundSourceFile);

            return boundSourceFile;
        }
        finally
        {
            file.Analyzer = RepoFileAnalyzer.Null;
            ProjectAnalyzer = null;
        }
    }));
}
public static void AssertLinesInclude(this DocumentAnalyzer actual, params LineSpan[] expected)
{
    var missingLines = new HashSet<LineSpan>(expected).Except(actual.GetAllLines());
    if (missingLines.Any())
    {
        Assert.Fail("Lines not found:\r\n{0}\r\n\r\nActual lines:\r\n{1}",
            missingLines.ToFormattedString(),
            actual.GetAllLines().ToFormattedString());
    }
}
public void DocumentAnalyzer_ShouldAnalyzeDocument()
{
    using (var doc = DocX.Load("./docx/headers_and_lists.docx"))
    {
        var analyzer = new DocumentAnalyzer(doc);
        analyzer.Analyze();

        var result = analyzer.Result.Keys;
        Assert.Equal(5, result.Count());
        Assert.Equal("Nagwek001", result.ElementAt(0));
        Assert.True(result.ElementAt(3).StartsWith("ordered_"));
        Assert.True(result.ElementAt(4).StartsWith("unordered_"));
    }
}
public void DocumentAnalyzer_ShouldGuessPossiblyStyle()
{
    using (var doc = DocX.Load("./docx/headers_and_lists.docx"))
    {
        var analyzer = new DocumentAnalyzer(doc);
        analyzer.Analyze();

        var result = analyzer.Result.Values;
        Assert.Equal(5, result.Count());
        Assert.Equal("Header1", result.ElementAt(0));
        Assert.Equal("Header2", result.ElementAt(1));
        Assert.Equal("P", result.ElementAt(2));
    }
}
public static ClientModel CreateClientModelAsync(string queryResource, string schemaResource)
{
    ISchema schema = SchemaHelper.Load(
        (string.Empty, Utf8GraphQLParser.Parse(Open(schemaResource))),
        (string.Empty, Utf8GraphQLParser.Parse("extend schema @key(fields: \"id\")")));

    DocumentNode document = Utf8GraphQLParser.Parse(Open(queryResource));

    return DocumentAnalyzer
        .New()
        .SetSchema(schema)
        .AddDocument(document)
        .Analyze();
}
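A call site for this helper passes the names of the query and schema resources; the names below are placeholders, assuming the Open helper can resolve them:

// Hypothetical usage: resource names are illustrative only and must match whatever
// the Open(...) helper can resolve for the test assembly.
ClientModel model = CreateClientModelAsync("GetHero.graphql", "Schema.graphql");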
private void Run()
{
    PerformanceLogger.DumpEvents += PerformanceLogger_DumpEvents;
    try
    {
        foreach (var file in _testFiles)
        {
            var buffer = new MockTextBuffer(File.ReadAllText(file));
            var snapshot = buffer.CurrentSnapshot;
            var behaviour = new LineBehavior
            {
                VisibleEmpty = true,
                VisibleEmptyAtEnd = true,
                VisibleAligned = true,
                VisibleAtTextEnd = false,
                ExtendInwardsOnly = true,
                VisibleUnaligned = true
            };

            foreach (var chunkSize in new[] { 5, 10, 30, 50, 100, 150, 200 })
            {
                var da = new DocumentAnalyzer(snapshot, behaviour, 4, 4, chunkSize);

                var sw = Stopwatch.StartNew();
                for (int repeats = 1000; repeats > 0; --repeats)
                {
                    IndentGuidePackage.JoinableTaskFactory.Run(async delegate
                    {
                        await da.ResetAsync().ConfigureAwait(true);
                    });
                }
                for (int line = 0; line < da.Snapshot.LineCount; line += 30)
                {
                    var lines = da.GetLines(line, line + 35).ToList();
                }
                sw.Stop();

                Console.WriteLine("ChunkSize = {0}", chunkSize);
                Console.WriteLine("Duration = {0}", sw.ElapsedTicks / 1000);
                PrintEventSummary(1000);
                Console.WriteLine();
            }
        }
    }
    finally
    {
        PerformanceLogger.DumpEvents -= PerformanceLogger_DumpEvents;
    }
}
private static ClientModel CreateClientModel(params string[] sourceText)
{
    var documents = sourceText
        .Select(source => (string.Empty, Utf8GraphQLParser.Parse(source)))
        .ToList();

    var typeSystemDocs = documents.GetTypeSystemDocuments().ToList();
    var executableDocs = documents.GetExecutableDocuments().ToList();

    var analyzer = new DocumentAnalyzer();
    analyzer.SetSchema(SchemaHelper.Load(typeSystemDocs));

    foreach (DocumentNode executable in executableDocs.Select(doc => doc.document))
    {
        analyzer.AddDocument(executable);
    }

    return analyzer.Analyze();
}
public static void AssertLinesIncludeExactly(this DocumentAnalyzer actual, params LineSpan[] expected)
{
    var missingLines = new HashSet<LineSpan>(expected).Except(actual.GetAllLines());
    var unexpectedLines = new HashSet<LineSpan>(actual.GetAllLines()).Except(expected);

    var message = new List<string>();
    if (missingLines.Any())
    {
        message.Add("Lines not found:\r\n" + missingLines.ToFormattedString());
    }
    if (unexpectedLines.Any())
    {
        message.Add("Unexpected lines:\r\n" + unexpectedLines.ToFormattedString());
    }
    if (message.Any())
    {
        message.Add("Actual lines:\r\n" + actual.GetAllLines().Distinct().ToFormattedString());
        Assert.Fail(string.Join("\r\n\r\n", message));
    }
}
private static ClientModel CreateClientModel(string[] sourceText, bool strictValidation)
{
    var files = sourceText
        .Select(s => new GraphQLFile(Utf8GraphQLParser.Parse(s)))
        .ToList();

    var typeSystemDocs = files.GetTypeSystemDocuments().ToList();
    var executableDocs = files.GetExecutableDocuments().ToList();

    var analyzer = new DocumentAnalyzer();
    analyzer.SetSchema(SchemaHelper.Load(typeSystemDocs, strictValidation));

    foreach (DocumentNode executable in executableDocs.Select(file => file.Document))
    {
        analyzer.AddDocument(executable);
    }

    return analyzer.Analyze();
}
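A minimal, hypothetical call to the helper above supplies one schema document and one operation document, with strict schema validation disabled; the SDL and operation text are made up for illustration:

// Hypothetical usage of CreateClientModel: one type system document and one
// executable document, with strict schema validation switched off.
ClientModel model = CreateClientModel(
    new[]
    {
        "type Query { hello: String }",
        "query GetHello { hello }"
    },
    strictValidation: false);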
public static async Task<ClientModel> CreateClientModelAsync(string query)
{
    ISchema schema = await new ServiceCollection()
        .AddStarWarsRepositories()
        .AddGraphQL()
        .AddStarWars()
        .BuildSchemaAsync();

    schema = SchemaHelper.Load(
        (string.Empty, schema.ToDocument()),
        (string.Empty, Utf8GraphQLParser.Parse("extend schema @key(fields: \"id\")")));

    DocumentNode document = Utf8GraphQLParser.Parse(query);

    return DocumentAnalyzer
        .New()
        .SetSchema(schema)
        .AddDocument(document)
        .Analyze();
}
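Usage is a single awaited call with an operation string; the query below is an assumed example, and its field and argument names depend on the Star Wars test schema registered above:

// Hypothetical caller; field and argument names are assumptions about the Star Wars test schema.
ClientModel model = await CreateClientModelAsync(
    "query GetHuman { human(id: \"1000\") { name } }");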
async Task<string> GenerateScriptAsync(ProgramComposition composition)
{
    var root = composition.RootNode;
    composition = await composition.WithNewDocumentRootAsync(root);

    var analyzer = new DocumentAnalyzer();
    var result = await analyzer.AnalyzeAsync(composition.Document);
    var buffer = new StringBuilder();

    var programContent = string.Join("",
        result.Parts.OfType<ProgramScriptPart>().Select(p => p.GenerateContent()));
    buffer.Append(programContent);

    var extensionContent = string.Join("",
        result.Parts.OfType<ExtensionScriptPart>().Select(p => p.GenerateContent()));
    if (!string.IsNullOrWhiteSpace(extensionContent))
    {
        // Extension classes are made possible by forcefully ending Space Engineers' wrapping Program class
        // and removing the final ending brace of the last extension class to let Space Engineers close it
        // by itself.

        // Close off the Program class. Unfortunately we do need a newline here because the whitespace
        // compactor can't properly deal with the separation otherwise.
        buffer.Append("\n}");
        buffer.Append(extensionContent);

        // Remove the ending brace of the last extension class
        var index = FindEndBrace(buffer);
        if (index >= 0)
        {
            buffer.Length = index;
        }
    }

    return TrimPointlessWhitespace(buffer.ToString());
}
/// <summary>
/// Instantiates a new indent guide manager for a view.
/// </summary>
/// <param name="view">The text view to provide guides for.</param>
/// <param name="service">The Indent Guide service.</param>
public IndentGuideView(IWpfTextView view, IIndentGuide service)
{
    View = view;

    if (!service.Themes.TryGetValue(View.TextDataModel.ContentType.DisplayName, out Theme))
    {
        Theme = service.DefaultTheme;
    }
    if (Theme != null && Theme.Behavior != null && Theme.Behavior.Disabled)
    {
        return;
    }

    GuideBrushCache = new Dictionary<System.Drawing.Color, Brush>();
    GlowEffectCache = new Dictionary<System.Drawing.Color, Effect>();

    View.Caret.PositionChanged += Caret_PositionChanged;
    View.LayoutChanged += View_LayoutChanged;
    View.Options.OptionChanged += View_OptionChanged;

    Layer = view.GetAdornmentLayer("IndentGuide");

    Canvas = new Canvas();
    Canvas.HorizontalAlignment = HorizontalAlignment.Stretch;
    Canvas.VerticalAlignment = VerticalAlignment.Stretch;
    Layer.AddAdornment(AdornmentPositioningBehavior.OwnerControlled, null, null, Canvas, CanvasRemoved);

    Debug.Assert(Theme != null, "No themes loaded");
    if (Theme == null)
    {
        Theme = new IndentTheme();
    }
    service.ThemesChanged += new EventHandler(Service_ThemesChanged);

    Analysis = new DocumentAnalyzer(
        View.TextSnapshot,
        Theme.Behavior,
        View.Options.GetOptionValue(DefaultOptions.IndentSizeOptionId),
        View.Options.GetOptionValue(DefaultOptions.TabSizeOptionId));

    GlobalVisible = service.Visible;
    service.VisibleChanged += new EventHandler(Service_VisibleChanged);

    var t = AnalyzeAndUpdateAdornments();
}
/// <summary>
/// Raised when the theme is updated.
/// </summary>
async void Service_ThemesChanged(object sender, EventArgs e)
{
    var service = (IIndentGuide)sender;
    if (!service.Themes.TryGetValue(View.TextDataModel.ContentType.DisplayName, out Theme))
    {
        Theme = service.DefaultTheme;
    }

    Analysis = new DocumentAnalyzer(
        View.TextSnapshot,
        Theme.Behavior,
        View.Options.GetOptionValue(DefaultOptions.IndentSizeOptionId),
        View.Options.GetOptionValue(DefaultOptions.TabSizeOptionId));

    GuideBrushCache.Clear();
    GlowEffectCache.Clear();

    await AnalyzeAndUpdateAdornments();
}
/// <summary>
/// Raised when a view option changes.
/// </summary>
async void View_OptionChanged(object sender, EditorOptionChangedEventArgs e)
{
    if (e.OptionId == DefaultOptions.IndentSizeOptionId.Name)
    {
        Analysis = new DocumentAnalyzer(
            View.TextSnapshot,
            Theme.Behavior,
            View.Options.GetOptionValue(DefaultOptions.IndentSizeOptionId),
            View.Options.GetOptionValue(DefaultOptions.TabSizeOptionId));

        GuideBrushCache.Clear();
        GlowEffectCache.Clear();

        await AnalyzeAndUpdateAdornments();
    }
}
public AuctionService(WebScraper.WebScraper webScraper)
{
    _documentAnalyzer = new DocumentAnalyzer();
    _webScraper = webScraper;
}
public override void Refresh()
{
    base.Refresh();
    if (Theme != null && Theme.Behavior != null && IsHandleCreated)
    {
        var snapshot = new FakeSnapshot(Text);
        Analysis = new DocumentAnalyzer(snapshot, Theme.Behavior, IndentSize, IndentSize);
        Analysis.Reset().ContinueWith(t =>
        {
            BeginInvoke((Action)Invalidate);
        });
    }
}
public void PASS_Create()
{
    DocumentAnalyzer analyzer = new DocumentAnalyzer("analyzer_path");
    Assert.IsNotNull(analyzer);
    Assert.AreEqual("analyzer_path", analyzer.Path);
}
public static void UpdateAndWait(this DocumentAnalyzer analyzer, TextViewLayoutChangedEventArgs changes)
{
    analyzer.Update(changes).GetAwaiter().GetResult();
}
private void Run()
{
    PerformanceLogger.DumpEvents += PerformanceLogger_DumpEvents;
    try
    {
        foreach (var file in _testFiles)
        {
            var buffer = new MockTextBuffer(File.ReadAllText(file));
            var snapshot = buffer.CurrentSnapshot;
            var behaviour = new LineBehavior
            {
                VisibleEmpty = true,
                VisibleEmptyAtEnd = true,
                VisibleAligned = true,
                VisibleAtTextEnd = false,
                ExtendInwardsOnly = true,
                VisibleUnaligned = true
            };

            foreach (var chunkSize in new[] { 5, 10, 30, 50, 100, 150, 200 })
            {
                var da = new DocumentAnalyzer(snapshot, behaviour, 4, 4, chunkSize);

                var sw = Stopwatch.StartNew();
                for (int repeats = 1000; repeats > 0; --repeats)
                {
                    da.Reset().GetAwaiter().GetResult();
                }
                for (int line = 0; line < da.Snapshot.LineCount; line += 30)
                {
                    var lines = da.GetLines(line, line + 35).ToList();
                }
                sw.Stop();

                Console.WriteLine("ChunkSize = {0}", chunkSize);
                Console.WriteLine("Duration = {0}", sw.ElapsedTicks / 1000);
                PrintEventSummary(1000);
                Console.WriteLine();
            }
        }
    }
    finally
    {
        PerformanceLogger.DumpEvents -= PerformanceLogger_DumpEvents;
    }
}
public CSharpGeneratorResult Generate(
    IEnumerable<string> graphQLFiles,
    string clientName = "GraphQL",
    string @namespace = "StrawberryShake.GraphQL")
{
    if (graphQLFiles is null)
    {
        throw new ArgumentNullException(nameof(graphQLFiles));
    }

    var errors = new List<IError>();
    var documents = new List<(string file, DocumentNode document)>();

    foreach (var file in graphQLFiles)
    {
        try
        {
            documents.Add((file, Utf8GraphQLParser.Parse(File.ReadAllBytes(file))));
        }
        catch (SyntaxException syntaxException)
        {
            errors.Add(
                Generator_SyntaxException(
                    syntaxException,
                    file));
        }
    }

    if (errors.Count > 0)
    {
        return new CSharpGeneratorResult(
            new List<CSharpDocument>(),
            errors);
    }

    var typeSystemDocs = documents.GetTypeSystemDocuments();
    var executableDocs = documents.GetExecutableDocuments();

    if (typeSystemDocs.Count == 0)
    {
        errors.AddRange(Generator_NoTypeDocumentsFound());
    }

    if (executableDocs.Count == 0)
    {
        errors.AddRange(Generator_NoExecutableDocumentsFound());
    }

    if (errors.Any())
    {
        return new CSharpGeneratorResult(
            new List<CSharpDocument>(),
            errors);
    }

    ISchema schema = SchemaHelper.Load(typeSystemDocs);

    IDocumentValidator validator = new ServiceCollection()
        .AddValidation()
        .Services
        .BuildServiceProvider()
        .GetRequiredService<IDocumentValidatorFactory>()
        .CreateValidator();

    // TODO: MST we need to rework this to reflect back on the correct file
    var merged = new DocumentNode(
        executableDocs.SelectMany(t => t.document.Definitions).ToList());

    var validationResult = validator.Validate(
        schema,
        merged);

    if (validationResult.HasErrors)
    {
        errors.AddRange(
            validationResult.Errors.Select(
                error => error
                    .WithCode(CodeGenerationErrorCodes.SchemaValidationError)
                    .WithExtensions(new Dictionary<string, object?>
                    {
                        { TitleExtensionKey, "Schema validation error" }
                    })));
    }

    /*
     * foreach ((string file, DocumentNode document) executableDoc in executableDocs)
     * {
     *     var validationResult = validator.Validate(
     *         schema,
     *         executableDoc.document);
     *
     *     if (validationResult.HasErrors)
     *     {
     *         errors.AddRange(
     *             validationResult.Errors
     *                 .Select(
     *                     error => error
     *                         .WithCode(CodeGenerationErrorCodes.SchemaValidationError)
     *                         .WithExtensions(new Dictionary<string, object?>
     *                         {
     *                             { FileExtensionKey, executableDoc.file },
     *                             { TitleExtensionKey, "Schema validation error" }
     *                         })));
     *     }
     * }
     */

    if (errors.Any())
    {
        return new CSharpGeneratorResult(
            new List<CSharpDocument>(),
            errors);
    }

    var analyzer = new DocumentAnalyzer();
    analyzer.SetSchema(schema);

    foreach ((string file, DocumentNode document) executableDocument in executableDocs)
    {
        analyzer.AddDocument(executableDocument.document);
    }

    ClientModel clientModel = analyzer.Analyze();

    var executor = new CSharpGeneratorExecutor();

    return new CSharpGeneratorResult(
        executor.Generate(
            clientModel,
            @namespace,
            clientName).ToList(),
        errors);
}
public static CSharpGeneratorResult Generate(
    IEnumerable<string> fileNames,
    CSharpGeneratorSettings? settings = null)
{
    if (fileNames is null)
    {
        throw new ArgumentNullException(nameof(fileNames));
    }

    settings ??= new();

    if (string.IsNullOrEmpty(settings.ClientName))
    {
        throw new ArgumentException(
            string.Format(
                Resources.CSharpGenerator_Generate_ArgumentCannotBeNull,
                nameof(settings.ClientName)),
            nameof(settings));
    }

    if (string.IsNullOrEmpty(settings.Namespace))
    {
        throw new ArgumentException(
            string.Format(
                Resources.CSharpGenerator_Generate_ArgumentCannotBeNull,
                nameof(settings.Namespace)),
            nameof(settings));
    }

    var files = new List<GraphQLFile>();
    var errors = new List<IError>();

    // Parse the GraphQL files ...
    if (!TryParseDocuments(fileNames, files, errors))
    {
        return new(errors);
    }

    // Divide the documents into type system documents for the schema
    // and executable documents.
    IReadOnlyList<GraphQLFile> typeSystemFiles = files.GetTypeSystemDocuments();
    IReadOnlyList<GraphQLFile> executableFiles = files.GetExecutableDocuments();

    if (typeSystemFiles.Count == 0 || executableFiles.Count == 0)
    {
        // If we do not have any documents we will just return without any errors.
        return new();
    }

    // Since from this point on we will work on a merged executable document we need to
    // index the syntax nodes so that we can link errors to the correct files.
    var fileLookup = new Dictionary<ISyntaxNode, string>();
    IndexSyntaxNodes(files, fileLookup);

    // We try to create a schema from the type system documents.
    // If we cannot create a schema we will return the schema validation errors.
    if (!TryCreateSchema(
        typeSystemFiles,
        fileLookup,
        errors,
        settings.StrictSchemaValidation,
        out ISchema? schema))
    {
        return new(errors);
    }

    // Next we will start validating the executable documents.
    if (!TryValidateRequest(schema, executableFiles, fileLookup, errors))
    {
        return new(errors);
    }

    // At this point we have a valid schema and know that our documents are executable
    // against the schema.
    //
    // In order to generate the client code we will first need to create a client model
    // which represents the logical parts of the executable documents.
    var analyzer = new DocumentAnalyzer();
    analyzer.SetSchema(schema);

    foreach (GraphQLFile executableDocument in executableFiles)
    {
        analyzer.AddDocument(executableDocument.Document);
    }

    ClientModel clientModel = analyzer.Analyze();

    // With the client model we can finally create the C# code.
    return Generate(clientModel, settings);
}
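An illustrative call to this overload, assuming the containing class is named CSharpGenerator; the file names, client name, and namespace are placeholders rather than values from the source:

// Hypothetical call site: file names, ClientName, and Namespace are illustrative values only.
CSharpGeneratorResult result = CSharpGenerator.Generate(
    new[] { "Schema.graphql", "GetHero.graphql" },
    new CSharpGeneratorSettings
    {
        ClientName = "StarWarsClient",
        Namespace = "Demo.StarWars"
    });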
public static void ResetAndWait(this DocumentAnalyzer analyzer)
{
    analyzer.Reset().GetAwaiter().GetResult();
}