// Benchmark setup: spin up a minimal no-compilation workspace and create the
// SQLite-backed persistent storage instance the benchmark methods will exercise.
public void GlobalSetup()
{
    _useExportProviderAttribute.Before(null);

    // Guard against double-initialization across benchmark runs.
    if (_workspace != null)
    {
        throw new InvalidOperationException();
    }

    _workspace = TestWorkspace.Create(
        @"<Workspace> <Project Language=""NoCompilation"" CommonReferences=""false""> <Document> // a no-compilation document </Document> </Project> </Workspace>");

    var pool = _workspace.ExportProvider.GetExportedValue<SQLiteConnectionPoolService>();
    var listener = _workspace.ExportProvider
        .GetExportedValue<IAsynchronousOperationListenerProvider>()
        .GetListener(FeatureAttribute.PersistentStorage);

    _storageService = new SQLitePersistentStorageService(pool, new StorageConfiguration(), listener);

    var currentSolution = _workspace.CurrentSolution;

    // GlobalSetup is synchronous by contract, so block on the async storage creation here.
    _storage = _storageService
        .GetStorageWorkerAsync(SolutionKey.ToSolutionKey(currentSolution), CancellationToken.None)
        .AsTask()
        .GetAwaiter()
        .GetResult();

    Console.WriteLine("Storage type: " + _storage.GetType());

    _document = _workspace.CurrentSolution.Projects.Single().Documents.Single();
    _random = new Random(0);
}
/// <summary>
/// Persists this object's serialized form for <paramref name="document"/> under
/// <see cref="PersistenceName"/>. Returns false when persistence fails for a
/// normal IO-style reason.
/// </summary>
private async Task<bool> SaveAsync(Document document, CancellationToken cancellationToken)
{
    var solution = document.Project.Solution;
    var storageService = solution.Workspace.Services.GetPersistentStorageService(solution.Options);

    try
    {
        var storage = await storageService.GetStorageAsync(SolutionKey.ToSolutionKey(solution), cancellationToken).ConfigureAwait(false);
        await using var _ = storage.ConfigureAwait(false);

        using var stream = SerializableBytes.CreateWritableStream();

        // Serialize into the stream, leaving it open so we can rewind and hand it to storage.
        using (var writer = new ObjectWriter(stream, leaveOpen: true, cancellationToken))
        {
            WriteTo(writer);
        }

        stream.Position = 0;
        return await storage.WriteStreamAsync(document, PersistenceName, stream, this.Checksum, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception e) when (IOUtilities.IsNormalIOException(e))
    {
        // Storage APIs can throw arbitrary exceptions.
    }

    return false;
}
/// <summary>
/// Returns true if an index matching <paramref name="checksum"/> is already persisted for
/// <paramref name="document"/>, meaning recomputation can be skipped. Returns false on
/// normal IO-style storage failures.
/// </summary>
private static async Task<bool> PrecalculatedAsync(
    Document document, Checksum checksum, CancellationToken cancellationToken)
{
    var solution = document.Project.Solution;
    var storageService = solution.Workspace.Services.GetPersistentStorageService(solution.Options);

    // check whether we already have info for this document
    try
    {
        var storage = await storageService.GetStorageAsync(SolutionKey.ToSolutionKey(solution), cancellationToken).ConfigureAwait(false);
        await using var _ = storage.ConfigureAwait(false);

        // If a checksum was previously stored and it matches the expected one, this index
        // is already precalculated. A missing or mismatched checksum means the caller must
        // recompute it.
        return await storage.ChecksumMatchesAsync(document, PersistenceName, checksum, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception e) when (IOUtilities.IsNormalIOException(e))
    {
        // Storage APIs can throw arbitrary exceptions.
    }

    return false;
}
// Benchmark setup: create a minimal no-compilation workspace, explicitly select the
// SQLite database option, and fail fast if we did not actually get a SQLite storage
// instance back.
public void GlobalSetup()
{
    _useExportProviderAttribute.Before(null);

    // Guard against double-initialization across benchmark runs.
    if (_workspace != null)
    {
        throw new InvalidOperationException();
    }

    _workspace = TestWorkspace.Create(
        @"<Workspace> <Project Language=""NoCompilation"" CommonReferences=""false""> <Document> // a no-compilation document </Document> </Project> </Workspace>");

    // Explicitly choose the sqlite db to test.
    _workspace.TryApplyChanges(
        _workspace.CurrentSolution.WithOptions(
            _workspace.Options.WithChangedOption(StorageOptions.Database, StorageDatabase.SQLite)));

    var pool = _workspace.ExportProvider.GetExportedValue<SQLiteConnectionPoolService>();
    _storageService = new SQLitePersistentStorageService(pool, new LocationService());

    var currentSolution = _workspace.CurrentSolution;

    // GlobalSetup is synchronous by contract, so block on the async storage creation here.
    _storage = _storageService
        .GetStorageWorkerAsync(_workspace, SolutionKey.ToSolutionKey(currentSolution), currentSolution, CancellationToken.None)
        .AsTask()
        .GetAwaiter()
        .GetResult();

    if (_storage == NoOpPersistentStorage.Instance)
    {
        throw new InvalidOperationException("We didn't properly get the sqlite storage instance.");
    }

    Console.WriteLine("Storage type: " + _storage.GetType());

    _document = _workspace.CurrentSolution.Projects.Single().Documents.Single();
    _random = new Random(0);
}
/// <summary>
/// Verifies that the persistent-storage location computed for a solution is never rooted
/// directly at '/' (the cache directory must not land at the filesystem root).
/// </summary>
public void CacheDirectoryShouldNotBeAtRoot()
{
    // FIX: the workspace is IDisposable and was previously leaked.
    using var workspace = new AdhocWorkspace(FeaturesTestCompositions.Features.GetHostServices());
    workspace.AddSolution(SolutionInfo.Create(SolutionId.CreateNewId(), new VersionStamp(), @"D:\git\PCLCrypto\PCLCrypto.sln"));

    var configuration = workspace.Services.GetRequiredService<IPersistentStorageConfiguration>();
    var location = configuration.TryGetStorageLocation(SolutionKey.ToSolutionKey(workspace.CurrentSolution));

    // FIX (CA1310): path prefix checks are not linguistic — use an explicit ordinal
    // comparison instead of the culture-sensitive StartsWith(string) overload.
    Assert.False(location?.StartsWith("/", StringComparison.Ordinal) ?? false);
}
/// <summary>
/// Locates and opens Roslyn.sln via MSBuildWorkspace with SQLite persistent storage
/// enabled, then forces a storage instance to be created up front (so its initial state
/// can be examined before any benchmark operations run).
/// </summary>
private async Task LoadSolutionAsync()
{
    var roslynRoot = Environment.GetEnvironmentVariable(Program.RoslynRootPathEnvVariableName);
    _solutionPath = Path.Combine(roslynRoot, "Roslyn.sln");

    if (!File.Exists(_solutionPath))
        throw new ArgumentException("Couldn't find Roslyn.sln");

    Console.WriteLine("Found Roslyn.sln: " + Process.GetCurrentProcess().Id);

    var assemblies = MSBuildMefHostServices.DefaultAssemblies
        .Add(typeof(AnalyzerRunnerHelper).Assembly)
        .Add(typeof(FindReferencesBenchmarks).Assembly);
    var services = MefHostServices.Create(assemblies);

    _workspace = MSBuildWorkspace.Create(new Dictionary<string, string>
    {
        // Use the latest language version to force the full set of available analyzers to run on the project.
        { "LangVersion", "9.0" },
    }, services);

    if (_workspace == null)
        throw new ArgumentException("Couldn't create workspace");

    _workspace.TryApplyChanges(_workspace.CurrentSolution.WithOptions(
        _workspace.Options.WithChangedOption(StorageOptions.Database, StorageDatabase.SQLite)));

    Console.WriteLine("Opening roslyn. Attach to: " + Process.GetCurrentProcess().Id);

    var start = DateTime.Now;

    // BUGFIX: previously blocked with '.Result' inside an async method (deadlock /
    // thread-pool-starvation risk) and stored the result in an unused local. Await
    // instead; the solution is reachable later via _workspace.CurrentSolution.
    await _workspace.OpenSolutionAsync(_solutionPath, progress: null, CancellationToken.None);
    Console.WriteLine("Finished opening roslyn: " + (DateTime.Now - start));

    // Force a storage instance to be created. This makes it simple to go examine it prior to any operations we
    // perform, including seeing how big the initial string table is.
    var storageService = _workspace.Services.GetPersistentStorageService(_workspace.CurrentSolution.Options);
    if (storageService == null)
        throw new ArgumentException("Couldn't get storage service");

    using (var storage = await storageService.GetStorageAsync(SolutionKey.ToSolutionKey(_workspace.CurrentSolution), CancellationToken.None))
    {
        // Fixed typo in the log message ("Sucessfully" -> "Successfully").
        Console.WriteLine("Successfully got persistent storage instance");
    }
}
/// <summary>
/// Builds the <see cref="DocumentKey"/> under which cached classification data for
/// <paramref name="document"/> is persisted.
/// </summary>
public static DocumentKey GetDocumentKeyForCaching(Document document)
{
    var project = document.Project;

    // Intentionally persist with a null 'parseOptionsChecksum' so the cached entry can be
    // looked up regardless of project configuration (e.g. written under DEBUG, read under
    // RELEASE). Slightly inaccurate data is acceptable: this cache only feeds temporary
    // classification until the real classifier takes over once the solution fully loads.
    var projectKey = new ProjectKey(SolutionKey.ToSolutionKey(project.Solution), project.Id, project.FilePath, project.Name, Checksum.Null);
    return new DocumentKey(projectKey, document.Id, document.FilePath, document.Name);
}
/// <summary>
/// Verifies the preview workspace wires up the preview-specific solution-crawler service
/// and a no-op persistent storage implementation.
/// </summary>
public async Task TestPreviewServices()
{
    using var previewWorkspace = new PreviewWorkspace(EditorTestCompositions.EditorFeatures.GetHostServices());

    var crawlerService = previewWorkspace.Services.GetService<ISolutionCrawlerRegistrationService>();
    Assert.IsType<PreviewSolutionCrawlerRegistrationServiceFactory.Service>(crawlerService);

    var persistentService = previewWorkspace.Services.GetPersistentStorageService(previewWorkspace.CurrentSolution.Options);

    await using var storage = await persistentService.GetStorageAsync(SolutionKey.ToSolutionKey(previewWorkspace.CurrentSolution), CancellationToken.None);
    Assert.IsType<NoOpPersistentStorage>(storage);
}
/// <summary>
/// Gets the <see cref="SymbolTreeInfo"/> for a metadata reference, preferring the
/// in-memory cache when its checksum matches. Returns an empty info when the reference's
/// metadata cannot be obtained, or null when <paramref name="loadOnly"/> is true and
/// nothing is cached yet.
/// </summary>
public static async ValueTask<SymbolTreeInfo> GetInfoForMetadataReferenceAsync(
    Solution solution,
    PortableExecutableReference reference,
    Checksum checksum,
    bool loadOnly,
    CancellationToken cancellationToken)
{
    var metadataId = GetMetadataIdNoThrow(reference);
    if (metadataId == null)
    {
        return CreateEmpty(checksum);
    }

    // Fast path: reuse the cached info if its checksum still matches.
    if (s_metadataIdToInfo.TryGetValue(metadataId, out var cachedInfoTask))
    {
        var cachedInfo = await cachedInfoTask.GetValueAsync(cancellationToken).ConfigureAwait(false);
        if (cachedInfo.Checksum == checksum)
        {
            return cachedInfo;
        }
    }

    var metadata = GetMetadataNoThrow(reference);
    if (metadata == null)
    {
        return CreateEmpty(checksum);
    }

    // If the data isn't in the table, and the client only wants the data if already loaded, then bail out as we
    // have no results to give. The data will eventually populate in memory due to
    // SymbolTreeInfoIncrementalAnalyzer eventually getting around to loading it.
    if (loadOnly)
    {
        return null;
    }

    return await GetInfoForMetadataReferenceSlowAsync(
        solution.Workspace,
        SolutionKey.ToSolutionKey(solution),
        reference,
        checksum,
        metadata,
        cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Writes a stream through storage opened from a solution key, then verifies it can be
/// read back through storage opened from the full solution instance.
/// </summary>
public async Task TestOpenWithSolutionReadWithDocument_WriteWithSolutionKey()
{
    var solution = CreateOrOpenSolution();
    var document = solution.Projects.Single().Documents.Single();

    var streamName1 = "stream";

    // FIX: dispose the storage asynchronously ('await using') like the sibling
    // persistence tests do; a synchronous Dispose of an async-disposable storage can
    // block the test thread on pending background work.
    await using (var storage = await GetStorageFromKeyAsync(solution.Workspace, SolutionKey.ToSolutionKey(solution)))
    {
        await storage.WriteStreamAsync(document, streamName1, EncodeString(GetData1(Size.Small)), checksum: s_checksum1);
    }

    await using (var storage = await GetStorageAsync(solution))
    {
        Assert.True(await storage.ChecksumMatchesAsync(document, streamName1, s_checksum1));
        Assert.Equal(GetData1(Size.Small), ReadStringToEnd(await storage.ReadStreamAsync(document, streamName1)));
    }
}
/// <summary>
/// Creates a fresh persistent storage service for <paramref name="solution"/> (shutting
/// down any service handed out for a previous test) and returns its storage instance.
/// </summary>
internal async Task<IChecksummedPersistentStorage> GetStorageAsync(
    Solution solution,
    IPersistentStorageFaultInjector?faultInjector = null,
    bool throwOnFailure = true)
{
    // If we handed out one for a previous test, we need to shut that down first
    _storageService?.GetTestAccessor().Shutdown();

    var configuration = new MockPersistentStorageConfiguration(solution.Id, _persistentFolder.Path, throwOnFailure);

    _storageService = GetStorageService(
        (IMefHostExportProvider)solution.Workspace.Services.HostServices,
        configuration,
        faultInjector,
        _persistentFolder.Path);

    var storage = await _storageService.GetStorageAsync(SolutionKey.ToSolutionKey(solution), checkBranchId: true, CancellationToken.None);

    // If we're injecting faults, we expect things to be strange
    if (faultInjector == null)
    {
        Assert.NotEqual(NoOpPersistentStorage.TestAccessor.StorageInstance, storage);
    }

    return storage;
}
/// <summary>
/// Computes the (document key, text checksum) pair used to persist and look up cached
/// classification data for <paramref name="document"/>.
/// </summary>
public static async Task<(DocumentKey documentKey, Checksum checksum)> GetDocumentKeyAndChecksumAsync(
    Document document, CancellationToken cancellationToken)
{
    var project = document.Project;

    // Intentionally persist with a null 'parseOptionsChecksum' so the cached entry can be
    // looked up regardless of project configuration (e.g. written under DEBUG, read under
    // RELEASE). Slightly inaccurate data is acceptable: this cache only feeds temporary
    // classification until the real classifier takes over once the solution fully loads.
    var projectKey = new ProjectKey(SolutionKey.ToSolutionKey(project.Solution), project.Id, project.FilePath, project.Name, Checksum.Null);
    var documentKey = new DocumentKey(projectKey, document.Id, document.FilePath, document.Name);

    // Checksum only the file contents. During load we can't compute anything else — other
    // files, metadata and dependencies aren't necessarily known yet — so prior semantic
    // classifications remain usable as long as the text itself is unchanged.
    var stateChecksums = await document.State.GetStateChecksumsAsync(cancellationToken).ConfigureAwait(false);
    return (documentKey, stateChecksums.Text);
}
/// <summary>
/// Runs the configured incremental analyzers over the loaded workspace, optionally backed
/// by SQLite persistent storage.
/// </summary>
public async Task RunAsync(CancellationToken cancellationToken)
{
    if (!HasAnalyzers)
    {
        return;
    }

    var usePersistentStorage = _options.UsePersistentStorage;

    var exportProvider = (IMefHostExportProvider)_workspace.Services.HostServices;

    // Apply the requested background-analysis scope to both languages.
    var globalOptions = exportProvider.GetExports<IGlobalOptionService>().Single().Value;
    globalOptions.SetGlobalOption(new OptionKey(SolutionCrawlerOptionsStorage.BackgroundAnalysisScopeOption, LanguageNames.CSharp), _options.AnalysisScope);
    globalOptions.SetGlobalOption(new OptionKey(SolutionCrawlerOptionsStorage.BackgroundAnalysisScopeOption, LanguageNames.VisualBasic), _options.AnalysisScope);

    var workspaceConfigurationService = (AnalyzerRunnerWorkspaceConfigurationService)_workspace.Services.GetRequiredService<IWorkspaceConfigurationService>();
    workspaceConfigurationService.Options = new(CacheStorage: usePersistentStorage ? StorageDatabase.SQLite : StorageDatabase.None);

    var solutionCrawlerRegistrationService = (SolutionCrawlerRegistrationService)_workspace.Services.GetRequiredService<ISolutionCrawlerRegistrationService>();
    solutionCrawlerRegistrationService.Register(_workspace);

    if (usePersistentStorage)
    {
        // Fail fast if persistent storage was requested but only the no-op fallback is available.
        var persistentStorageService = _workspace.Services.GetPersistentStorageService();
        await using var persistentStorage = await persistentStorageService.GetStorageAsync(SolutionKey.ToSolutionKey(_workspace.CurrentSolution), cancellationToken).ConfigureAwait(false);
        if (persistentStorage is NoOpPersistentStorage)
        {
            throw new InvalidOperationException("Benchmark is not configured to use persistent storage.");
        }
    }

    var incrementalAnalyzerProviders = exportProvider.GetExports<IIncrementalAnalyzerProvider, IncrementalAnalyzerProviderMetadata>();

    foreach (var incrementalAnalyzerName in _options.IncrementalAnalyzerNames)
    {
        // Resolve the provider registered under this name for a specific workspace kind.
        IIncrementalAnalyzerProvider FindProviderForKind(string kind)
            => incrementalAnalyzerProviders
                .Where(x => x.Metadata.Name == incrementalAnalyzerName)
                .SingleOrDefault(provider => provider.Metadata.WorkspaceKinds?.Contains(kind) ?? false)?.Value;

        // Prefer the most specific workspace kind, then fall back to the provider with no
        // declared kinds (which must exist, hence Single rather than SingleOrDefault).
        var incrementalAnalyzerProvider =
            FindProviderForKind(_workspace.Kind)
            ?? FindProviderForKind(WorkspaceKind.Host)
            ?? FindProviderForKind(WorkspaceKind.RemoteWorkspace)
            ?? incrementalAnalyzerProviders
                .Where(x => x.Metadata.Name == incrementalAnalyzerName)
                .Single(provider => provider.Metadata.WorkspaceKinds is null)
                .Value;

        var incrementalAnalyzer = incrementalAnalyzerProvider.CreateIncrementalAnalyzer(_workspace);
        solutionCrawlerRegistrationService.GetTestAccessor().WaitUntilCompletion(_workspace, ImmutableArray.Create(incrementalAnalyzer));

        switch (incrementalAnalyzerName)
        {
            case nameof(SymbolTreeInfoIncrementalAnalyzerProvider):
                var symbolTreeInfoCacheService = _workspace.Services.GetRequiredService<ISymbolTreeInfoCacheService>();
                var symbolTreeInfo = await symbolTreeInfoCacheService.TryGetSourceSymbolTreeInfoAsync(_workspace.CurrentSolution.Projects.First(), cancellationToken).ConfigureAwait(false);
                if (symbolTreeInfo is null)
                {
                    throw new InvalidOperationException("Benchmark failed to calculate symbol tree info.");
                }

                break;

            default:
                // No additional actions required
                break;
        }
    }
}
/// <summary>
/// Locates and opens Compilers.sln via MSBuildWorkspace, forces a persistent storage
/// instance to be created up front, and resolves the Microsoft.CodeAnalysis.SyntaxToken
/// type used by the find-references benchmark.
/// </summary>
private async Task LoadSolutionAsync()
{
    var roslynRoot = Environment.GetEnvironmentVariable(Program.RoslynRootPathEnvVariableName);
    var solutionPath = Path.Combine(roslynRoot, "Compilers.sln");
    if (!File.Exists(solutionPath))
        throw new ArgumentException("Couldn't find Compilers.sln");

    Console.WriteLine("Found Compilers.sln: " + Process.GetCurrentProcess().Id);

    var assemblies = MSBuildMefHostServices.DefaultAssemblies
        .Add(typeof(AnalyzerRunnerHelper).Assembly)
        .Add(typeof(FindReferencesBenchmarks).Assembly);
    var services = MefHostServices.Create(assemblies);

    _workspace = MSBuildWorkspace.Create(new Dictionary<string, string>
    {
        // Use the latest language version to force the full set of available analyzers to run on the project.
        { "LangVersion", "preview" },
    }, services);

    if (_workspace == null)
        throw new ArgumentException("Couldn't create workspace");

    Console.WriteLine("Opening roslyn. Attach to: " + Process.GetCurrentProcess().Id);

    var start = DateTime.Now;
    _solution = await _workspace.OpenSolutionAsync(solutionPath, progress: null, CancellationToken.None);
    Console.WriteLine("Finished opening roslyn: " + (DateTime.Now - start));

    // Force a storage instance to be created. This makes it simple to go examine it prior to any operations we
    // perform, including seeing how big the initial string table is.
    var storageService = _workspace.Services.GetPersistentStorageService();
    if (storageService == null)
        throw new ArgumentException("Couldn't get storage service");

    using (var storage = await storageService.GetStorageAsync(SolutionKey.ToSolutionKey(_workspace.CurrentSolution), CancellationToken.None))
    {
        // Fixed typo in the log message ("Sucessfully" -> "Successfully").
        Console.WriteLine("Successfully got persistent storage instance");
    }

    // There might be multiple projects with this name. That's ok. FAR goes and finds all the linked-projects
    // anyways to perform the search on all the equivalent symbols from them. So the end perf cost is the
    // same.
    var project = _solution.Projects.First(p => p.AssemblyName == "Microsoft.CodeAnalysis");

    start = DateTime.Now;
    var compilation = await project.GetCompilationAsync();
    Console.WriteLine("Time to get first compilation: " + (DateTime.Now - start));

    _type = compilation.GetTypeByMetadataName("Microsoft.CodeAnalysis.SyntaxToken");
    if (_type == null)
        throw new Exception("Couldn't find type");
}
/// <summary>
/// Maps a full <see cref="Solution"/> to its storage location by delegating to the
/// solution-key based overload.
/// </summary>
public string? TryGetStorageLocation(Solution solution)
{
    var solutionKey = SolutionKey.ToSolutionKey(solution);
    return TryGetStorageLocation(solution.Workspace, solutionKey);
}
/// <summary>
/// Returns the persistent-storage working folder for the current Visual Studio solution,
/// or null if no location is available.
/// </summary>
public string? GetWorkingFolder()
{
    var configuration = _visualStudioWorkspace.Services.GetRequiredService<IPersistentStorageConfiguration>();
    var solutionKey = SolutionKey.ToSolutionKey(_visualStudioWorkspace.CurrentSolution);
    return configuration.TryGetStorageLocation(solutionKey);
}
/// <summary>
/// Writes a stream through storage opened from a solution key, then verifies it reads
/// back correctly through both the full Document and the lighter DocumentKey.
/// </summary>
public async Task TestOpenWithSolutionKeyReadWithDocumentKeyAndDocument2_WriteWithSolutionKey(Size size, [CombinatorialRange(0, Iterations)] int iteration)
{
    _ = iteration;

    var solution = CreateOrOpenSolution();
    var document = solution.Projects.Single().Documents.Single();
    var solutionKey = SolutionKey.ToSolutionKey(solution);

    var streamName1 = "stream";

    await using (var writeStorage = await GetStorageFromKeyAsync(solution.Workspace, solutionKey))
    {
        await writeStorage.WriteStreamAsync(document, streamName1, EncodeString(GetData1(size)), checksum: s_checksum1);
    }

    await using (var readStorage = await GetStorageFromKeyAsync(solution.Workspace, solutionKey))
    {
        var documentKey = DocumentKey.ToDocumentKey(document);

        Assert.True(await readStorage.ChecksumMatchesAsync(document, streamName1, s_checksum1));
        Assert.Equal(GetData1(size), ReadStringToEnd(await readStorage.ReadStreamAsync(document, streamName1)));

        Assert.True(await readStorage.ChecksumMatchesAsync(documentKey, streamName1, s_checksum1));
        Assert.Equal(GetData1(size), ReadStringToEnd(await readStorage.ReadStreamAsync(documentKey, streamName1)));
    }
}