internal static bool ValidateAll(List<string> failedFileList)
{
    if (s_Process == null)
    {
        s_Process = new ApiScraperProcess();
    }
    if (s_Scraper == null)
    {
        s_Scraper = new ApiScraperManager(s_Process);
    }

    var failed = false;
    var callback = new Action<string>(delegate(string filePath)
    {
        failed = true;
        failedFileList?.Add(filePath);
    });

    var oldScrapeMode = s_Scraper.scrapeMode;
    try
    {
        ApiScraperManager.DeleteAllTimestamps();

        // Run a full scan in verify-only mode: instead of rewriting files, the
        // process reports each file that would have changed via the callback.
        s_Scraper.scrapeMode = ScrapeMode.VerifyNoWrite;
        s_Process.verifyNoWriteFailed += callback;
        ScanAndScrape();
        s_AssemblyPathsToScrape.Clear();
    }
    finally
    {
        s_Scraper.scrapeMode = oldScrapeMode;
        s_Process.verifyNoWriteFailed -= callback;
    }

    return !failed;
}
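
// Hypothetical usage sketch (not part of the original integration): exposes the
// verify-only validation above as an editor menu entry and logs any files that
// would have been rewritten. The menu path, method name, and logging choices are
// illustrative assumptions, and the sketch assumes the usual UnityEditor and
// UnityEngine usings at file scope.
[MenuItem("Tools/Api Scraping/Validate All")]
static void ValidateAllMenuItem()
{
    var failedFiles = new List<string>();
    if (ValidateAll(failedFiles))
    {
        Debug.Log("API scrape verification passed; no scraped files would change.");
    }
    else
    {
        Debug.LogError("API scrape verification failed for " + failedFiles.Count + " file(s):\n" +
            string.Join("\n", failedFiles));
    }
}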
static ApiScrapingEditorIntegration()
{
    s_Process = new ApiScraperProcess();
    s_Scraper = new ApiScraperManager(s_Process);

    if (Environment.GetCommandLineArgs().Contains("runTests") ||
        Environment.GetCommandLineArgs().Contains("runEditorTests"))
    {
        // let's disable the scraper for test runs
        return;
    }

    s_AssemblyPathsToScrape = new List<string>();
    CompilationPipeline.compilationFinished += OnCompilationFinished;
    CompilationPipeline.assemblyCompilationFinished += OnAssemblyCompilationFinished;
    scrapeOnAssemblyCompilationFinished = true;

    if (!ApiScrapingSingleton.instance.hasDoneFullScan)
    {
        ApiScrapingSingleton.instance.hasDoneFullScan = true;
        ScanAndScrape();
    }
}
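
// A hedged sketch of the per-assembly callback wired up in the constructor above;
// the real OnAssemblyCompilationFinished lives elsewhere in this file and may differ.
// Assuming the handler simply queues compiled assemblies for the next scrape and
// respects the scrapeOnAssemblyCompilationFinished toggle, it could look like this:
static void OnAssemblyCompilationFinishedSketch(string assemblyPath, CompilerMessage[] messages)
{
    if (!scrapeOnAssemblyCompilationFinished)
        return;

    // Queue the assembly path so a later ScanAndScrape pass picks it up.
    if (!s_AssemblyPathsToScrape.Contains(assemblyPath))
        s_AssemblyPathsToScrape.Add(assemblyPath);
}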