/// <summary>
/// Adds a single product, disconnects (deactivate + dispose) and reconnects the
/// index, then confirms the product is still present after the reload.
/// </summary>
public void AddProductDisconnectReconnectGetProducts()
{
    m_Index = new SqlErrorIndex(StackHashSqlConfiguration.Default, SqlUtils.UnitTestDatabase, m_RootCabFolder);
    m_Index.DeleteIndex();
    m_Index.Activate();

    DateTime createdUtc = new DateTime(2010, 04, 04, 22, 9, 0, DateTimeKind.Utc);
    DateTime modifiedUtc = new DateTime(2010, 05, 05, 23, 10, 0, DateTimeKind.Utc);

    StackHashProduct newProduct = new StackHashProduct(createdUtc, modifiedUtc, "www.files.com", 25, @"P""r:o?d:u@(XP_2k)", 0, 0, "1:2:3:4");
    m_Index.AddProduct(newProduct);

    Assert.AreEqual(1, m_Index.TotalProducts);
    Assert.AreEqual(true, m_Index.ProductExists(newProduct));

    // Simulate a disconnect by shutting the index down completely.
    m_Index.Deactivate();
    m_Index.Dispose();

    // Reconnect with a fresh index instance over the same database.
    m_Index = new SqlErrorIndex(StackHashSqlConfiguration.Default, SqlUtils.UnitTestDatabase, m_RootCabFolder);
    m_Index.Activate();

    StackHashProductCollection reloadedProducts = m_Index.LoadProductList();
    Assert.AreEqual(1, reloadedProducts.Count);
    Assert.AreEqual(1, m_Index.TotalProducts);
    Assert.AreEqual(true, m_Index.ProductExists(newProduct));
}
/// <summary>
/// Processes a specific report request.
/// </summary>
private void processReportRequest(StackHashBugReportData request)
{
    if (this.CurrentTaskState.AbortRequested)
    {
        throw new OperationCanceledException("Reporting events to Bug Tracker plug-ins");
    }

    // The report type is passed on to the plug-in. It indicates whether this manual
    // report is for a whole product, file, event, cab, script or what.
    m_ReportType = getReportType(request);

    if (request.Product != null)
    {
        // A specific product was requested - report just that one, if it exists in the index.
        StackHashProduct requestedProduct = m_Index.GetProduct(request.Product.Id);
        if (requestedProduct != null)
        {
            processProduct(request, requestedProduct);
        }
    }
    else
    {
        // No product specified - report every product in the index.
        foreach (StackHashProduct indexProduct in m_Index.LoadProductList())
        {
            processProduct(request, indexProduct);
        }
    }
}
/// <summary>
/// Adds a product, then re-adds the same product ID with every other field changed,
/// requesting a full-field update. Checks the stored product matches the second version.
/// </summary>
private void updateAllWinQualFieldsInProduct(IErrorIndex index)
{
    index.Activate();

    const int productId = 200;
    DateTime createdUtc = new DateTime(2010, 04, 04, 22, 9, 0, DateTimeKind.Utc);
    DateTime modifiedUtc = new DateTime(2010, 05, 05, 23, 10, 0, DateTimeKind.Utc);

    // First version of the product.
    StackHashProduct originalProduct = new StackHashProduct(createdUtc, modifiedUtc, null, productId, "TestProduct1", 20, 30, "2.10.02123.1293", 20);
    index.AddProduct(originalProduct);

    // Second version: same id, dates bumped a day, counts and stored-event total changed.
    StackHashProduct updatedProduct = new StackHashProduct(createdUtc.AddDays(1), modifiedUtc.AddDays(1), null, productId, "TestProduct1", 22, 33, "2.10.02123.1293", 100);

    // Re-adding the same product ID - should replace all fields.
    index.AddProduct(updatedProduct, true);

    // The list should contain exactly the updated product.
    StackHashProductCollection products = index.LoadProductList();
    Assert.AreNotEqual(null, products);
    Assert.AreEqual(1, products.Count);
    Assert.AreEqual(0, updatedProduct.CompareTo(products[0]));
    Assert.AreEqual(updatedProduct.TotalStoredEvents, products[0].TotalStoredEvents);

    // Individual retrieval should agree too.
    StackHashProduct retrievedProduct = index.GetProduct(updatedProduct.Id);
    Assert.AreNotEqual(null, retrievedProduct);
    Assert.AreEqual(0, updatedProduct.CompareTo(retrievedProduct));
}
/// <summary>
/// Loading an empty product list twice should return an empty (non-null) collection both times.
/// </summary>
private void testLoadProductListEmptyCalledTwice(IErrorIndex index)
{
    index.Activate();

    for (int attempt = 0; attempt < 2; attempt++)
    {
        StackHashProductCollection products = index.LoadProductList();
        Assert.AreNotEqual(null, products);
        Assert.AreEqual(0, products.Count);
    }
}
/// <summary>
/// Retrieves the product list via the WinQual API and converts it to a
/// StackHashProductCollection.
/// </summary>
public static StackHashProductCollection GetProductsApi(ref Login login)
{
    ProductCollection rawProducts = Product.GetProducts(ref login);

    // Convert each API product to its StackHash representation.
    StackHashProductCollection convertedProducts = new StackHashProductCollection();
    foreach (Product rawProduct in rawProducts)
    {
        convertedProducts.Add(ObjectConversion.ConvertProduct(rawProduct));
    }

    return convertedProducts;
}
/// <summary>
/// Reads the product list from the Atom feed and converts it to a
/// StackHashProductCollection.
/// </summary>
public static StackHashProductCollection GetProductsAtom(AtomFeed feed)
{
    AtomProductCollection feedProducts = feed.GetProducts();

    StackHashProductCollection result = new StackHashProductCollection();
    foreach (AtomProduct feedProduct in feedProducts)
    {
        result.Add(feedProduct.Product);
    }

    return result;
}
/// <summary>
/// Converts an AtomProductCollection to a StackHashProductCollection.
/// Returns null when the input collection is null.
/// </summary>
public static StackHashProductCollection ConvertProductCollection(AtomProductCollection products)
{
    if (products == null)
    {
        return null;
    }

    StackHashProductCollection converted = new StackHashProductCollection();
    foreach (AtomProduct atomProduct in products)
    {
        converted.Add(atomProduct.Product);
    }

    return converted;
}
/// <summary>
/// Adds the same product ID twice (second time with changed fields), reloads the
/// index from disk, and checks the second version is the one persisted.
/// </summary>
private void testAddSameProductTwiceWithReset(bool useCache)
{
    IErrorIndex index = new XmlErrorIndex(m_TempPath, "Cucku");
    if (useCache)
    {
        index = new ErrorIndexCache(index);
    }
    index.Activate();

    const int productId = 200;

    // First version of the product.
    StackHashProduct latestProduct = new StackHashProduct(DateTime.Now.AddDays(0), DateTime.Now.AddDays(0), null, productId, "TestProduct1", 20, 30, "2.10.02123.1293");
    index.AddProduct(latestProduct);

    // Second version - same id, different dates and counts - should replace the first.
    latestProduct = new StackHashProduct(DateTime.Now.AddDays(1), DateTime.Now.AddDays(1), null, productId, "TestProduct1", 21, 31, "2.10.02123.1293");
    index.AddProduct(latestProduct);

    // Recreate the index from scratch so the data must come from persistent storage.
    index = new XmlErrorIndex(m_TempPath, "Cucku");
    if (useCache)
    {
        index = new ErrorIndexCache(index);
    }
    index.Activate();

    // Only the second version should be stored.
    StackHashProductCollection products = index.LoadProductList();
    Assert.AreNotEqual(null, products);
    Assert.AreEqual(1, products.Count);
    Assert.AreEqual(0, latestProduct.CompareTo(products[0]));

    StackHashProduct retrievedProduct = index.GetProduct(latestProduct.Id);
    Assert.AreNotEqual(null, retrievedProduct);
    Assert.AreEqual(0, latestProduct.CompareTo(retrievedProduct));
}
/// <summary>
/// Adds numProducts products to the real index, then checks that an ErrorIndexCache
/// layered on top returns the same products via LoadProductList and GetProduct.
/// </summary>
private void testCacheInitialiseNProducts(IErrorIndex realIndex, int numProducts)
{
    realIndex.Activate();

    StackHashProductCollection allProducts = new StackHashProductCollection();
    for (int i = 0; i < numProducts; i++)
    {
        DateTime creationDateTime = new DateTime(2010, 04, 04, 22, 9, 0, DateTimeKind.Utc).AddDays(i);
        DateTime modifiedDateTime = new DateTime(2010, 05, 05, 23, 10, 0, DateTimeKind.Utc).AddDays(i);
        int productId = 200 + i;
        StackHashProduct product1 = new StackHashProduct(creationDateTime, modifiedDateTime, null, productId, "TestProduct1", 20 + i, 30 + i, "2.10.02123.1293");
        realIndex.AddProduct(product1);

        // BUG FIX: allProducts was never populated, so the verification loops below
        // iterated zero times and the test checked nothing.
        allProducts.Add(product1);
    }

    // Hook up the cache and call LoadProductList.
    ErrorIndexCache indexCache = new ErrorIndexCache(realIndex);
    indexCache.Activate();

    // Get the product list and check every added product is present and identical.
    StackHashProductCollection products = indexCache.LoadProductList();
    Assert.AreNotEqual(null, products);
    Assert.AreEqual(numProducts, products.Count);
    for (int i = 0; i < allProducts.Count; i++)
    {
        // BUG FIX: compare product i - previously element 0 was compared on every pass.
        Assert.AreEqual(0, allProducts[i].CompareTo(products.FindProduct(allProducts[i].Id)));
    }

    // Hook up the cache afresh and call GetProduct for each product individually.
    indexCache = new ErrorIndexCache(realIndex);
    indexCache.Activate();
    for (int i = 0; i < allProducts.Count; i++)
    {
        StackHashProduct thisProduct = indexCache.GetProduct(allProducts[i].Id);
        Assert.AreNotEqual(null, thisProduct);
        Assert.AreEqual(0, allProducts[i].CompareTo(thisProduct));
    }
}
/// <summary>
/// Adds numProducts distinct products and checks they can all be retrieved, both
/// via the full product list and individually, with round-tripped UTC dates.
/// </summary>
private void testSaveNProducts(IErrorIndex index, int numProducts)
{
    index.Activate();

    DateTime createdUtc = new DateTime(2010, 04, 04, 22, 9, 0, DateTimeKind.Utc);
    DateTime modifiedUtc = new DateTime(2010, 05, 05, 23, 10, 0, DateTimeKind.Utc);

    StackHashProductCollection allProducts = new StackHashProductCollection();
    for (int i = 0; i < numProducts; i++)
    {
        StackHashProduct newProduct = new StackHashProduct(createdUtc, modifiedUtc, null, 1 + i, "TestProduct1" + i.ToString(), 20 + i, 30 + i, "2.10.02123.1293" + i.ToString());
        index.AddProduct(newProduct);
        allProducts.Add(newProduct);
    }

    StackHashProductCollection loadedProducts = index.LoadProductList();
    Assert.AreNotEqual(null, loadedProducts);
    Assert.AreEqual(numProducts, loadedProducts.Count);

    foreach (StackHashProduct expected in allProducts)
    {
        // The product must appear in the loaded list...
        StackHashProduct found = loadedProducts.FindProduct(expected.Id);
        Assert.AreNotEqual(null, found);
        Assert.AreEqual(0, expected.CompareTo(found));

        // ...and be retrievable individually.
        found = index.GetProduct(expected.Id);
        Assert.AreNotEqual(null, found);
        Assert.AreEqual(0, expected.CompareTo(found));

        // Check that the dates are stored in UTC.
        Assert.AreEqual(true, found.DateCreatedLocal.Kind == DateTimeKind.Utc);
        Assert.AreEqual(true, found.DateModifiedLocal.Kind == DateTimeKind.Utc);
    }

    Assert.AreEqual(numProducts, index.TotalProducts);
}
/// <summary>
/// Adds numProducts products each with numFiles files; each successive product gets
/// 10 more events per file than the previous one. Then checks that GetProductEvents
/// reports the expected per-product event count.
/// </summary>
public void getProductEvents(IErrorIndex index, int numProducts, int numFiles, int numEvents)
{
    StackHashProductCollection addedProducts = new StackHashProductCollection();

    int baseProductId = 0x1234567;
    int eventsForThisProduct = numEvents;
    int nextEventId = 10000;
    int nextFileId = 20;

    DateTime createdUtc = new DateTime(2010, 04, 04, 22, 9, 0, DateTimeKind.Utc);
    DateTime modifiedUtc = new DateTime(2010, 05, 05, 23, 10, 0, DateTimeKind.Utc);

    for (int productCount = 0; productCount < numProducts; productCount++)
    {
        StackHashProduct newProduct = new StackHashProduct(createdUtc, modifiedUtc, "www.files.com", baseProductId + productCount, @"P""r:o?d:u@(XP_2k)", productCount, productCount + 1, "1:2:3:4");
        index.AddProduct(newProduct);
        addedProducts.Add(newProduct);

        for (int fileCount = 0; fileCount < numFiles; fileCount++)
        {
            StackHashFile newFile = new StackHashFile(DateTime.Now, DateTime.Now, nextFileId++, DateTime.Now, "FileName", "1.2.3.4");
            index.AddFile(newProduct, newFile);

            for (int eventCount = 0; eventCount < eventsForThisProduct; eventCount++)
            {
                StackHashEvent newEvent = new StackHashEvent(DateTime.Now, DateTime.Now, "Event type", nextEventId++, new StackHashEventSignature(), eventCount, fileCount + 1000);
                newEvent.EventSignature.Parameters = new StackHashParameterCollection();
                index.AddEvent(newProduct, newFile, newEvent);
            }
        }

        // Each successive product receives 10 more events per file.
        eventsForThisProduct += 10;
    }

    // Verify the per-product event counts.
    eventsForThisProduct = numEvents;
    for (int productCount = 0; productCount < numProducts; productCount++)
    {
        StackHashEventPackageCollection productEvents = index.GetProductEvents(addedProducts[productCount]);
        Assert.AreEqual(numFiles * eventsForThisProduct, productEvents.Count);
        eventsForThisProduct += 10;
    }
}
/// <summary>
/// Checks that GetProduct returns null for a product ID not present in the index.
/// </summary>
private void testGetProductUnknownId(IErrorIndex index)
{
    index.Activate();

    const int knownProductId = 200;
    DateTime createdUtc = new DateTime(2010, 04, 04, 22, 9, 0, DateTimeKind.Utc);
    DateTime modifiedUtc = new DateTime(2010, 05, 05, 23, 10, 0, DateTimeKind.Utc);

    StackHashProduct knownProduct = new StackHashProduct(createdUtc, modifiedUtc, null, knownProductId, "TestProduct1", 20, 30, "2.10.02123.1293");
    index.AddProduct(knownProduct);

    // Sanity check: the known product round-trips correctly.
    StackHashProductCollection products = index.LoadProductList();
    Assert.AreNotEqual(null, products);
    Assert.AreEqual(1, products.Count);
    Assert.AreEqual(0, knownProduct.CompareTo(products[0]));

    // An id not in the index should yield null.
    StackHashProduct unknownProduct = index.GetProduct(knownProduct.Id + 1);
    Assert.AreEqual(null, unknownProduct);
}
//
// You can use the following additional attributes as you write your tests:
//
// Use ClassInitialize to run code before running the first test in the class
// [ClassInitialize()]
// public static void MyClassInitialize(TestContext testContext) { }
//
// Use ClassCleanup to run code after all tests in a class have run
// [ClassCleanup()]
// public static void MyClassCleanup() { }
//
// Use TestInitialize to run code before running each test
// [TestInitialize()]
// public void MyTestInitialize() { }
//
// Use TestCleanup to run code after each test has run
// [TestCleanup()]
// public void MyTestCleanup() { }
//
#endregion

/// <summary>
/// Builds a test index containing numProducts products, each with numFiles files,
/// each file with numEvents events, each event with numEventInfos hit records and
/// numCabs cabs. Then verifies that GetProductEvents returns exactly the packages
/// that were added (event data, event infos and cabs all compare equal).
/// </summary>
/// <param name="useSameLocale">When true every event info uses locale 10; otherwise each gets a distinct locale id.</param>
/// <param name="incrementingEventId">When event ids are not random, controls whether ids count up or down.</param>
/// <param name="randomEventIds">When true, event ids are drawn from a seeded Random (deterministic sequence).</param>
public void getPackages(int numProducts, int numFiles, int numEvents, int numEventInfos, int numCabs, bool useSameLocale, bool incrementingEventId, bool randomEventIds)
{
    int cabId = 1000;
    int eventId = 1320080390;
    int fileId = 0x12345678;
    int productId = 122;
    Random rand = new Random(100); // fixed seed so "random" ids are reproducible

    // Start from a clean SQL index.
    m_Index = new SqlErrorIndex(StackHashSqlConfiguration.Default, SqlUtils.UnitTestDatabase, m_RootCabFolder);
    m_Index.DeleteIndex();
    m_Index.Activate();

    // Maps product id -> the event packages added for that product (the expected results).
    Dictionary<int, StackHashEventPackageCollection> expectedProductEvents = new Dictionary<int, StackHashEventPackageCollection>();
    StackHashProductCollection allProducts = new StackHashProductCollection();

    for (int productCount = 0; productCount < numProducts; productCount++)
    {
        DateTime creationDateTime = new DateTime(2010, 04, 04, 22, 9, 0, DateTimeKind.Utc);
        DateTime modifiedDateTime = new DateTime(2010, 05, 05, 23, 10, 0, DateTimeKind.Utc);

        StackHashProduct product1 = new StackHashProduct(creationDateTime, modifiedDateTime, null, productId++, "TestProduct1", 20, 30, "2.10.02123.1293");
        m_Index.AddProduct(product1);
        allProducts.Add(product1);

        StackHashEventPackageCollection allAddedEvents = new StackHashEventPackageCollection();
        for (int fileCount = 0; fileCount < numFiles; fileCount++)
        {
            StackHashFile file1 = new StackHashFile(creationDateTime, modifiedDateTime, fileId++, creationDateTime, "File1.dll", "2.3.4.5");

            // Build a full event signature (all events in this file share it).
            StackHashEventSignature eventSignature = new StackHashEventSignature();
            eventSignature.Parameters = new StackHashParameterCollection();
            eventSignature.Parameters.Add(new StackHashParameter(StackHashEventSignature.ParamApplicationName, "AppName"));
            eventSignature.Parameters.Add(new StackHashParameter(StackHashEventSignature.ParamApplicationVersion, "1.2.3.4"));
            eventSignature.Parameters.Add(new StackHashParameter(StackHashEventSignature.ParamApplicationTimeStamp, creationDateTime.ToString()));
            eventSignature.Parameters.Add(new StackHashParameter(StackHashEventSignature.ParamModuleName, "ModuleName"));
            eventSignature.Parameters.Add(new StackHashParameter(StackHashEventSignature.ParamModuleVersion, "2.3.4.5"));
            eventSignature.Parameters.Add(new StackHashParameter(StackHashEventSignature.ParamModuleTimeStamp, creationDateTime.ToString()));
            eventSignature.Parameters.Add(new StackHashParameter(StackHashEventSignature.ParamExceptionCode, "1234"));
            eventSignature.Parameters.Add(new StackHashParameter(StackHashEventSignature.ParamOffset, "0x1234"));
            eventSignature.InterpretParameters();

            m_Index.AddFile(product1, file1);

            for (int eventCount = 0; eventCount < numEvents; eventCount++)
            {
                StackHashEvent event1 = new StackHashEvent(creationDateTime, modifiedDateTime, "EventTypeName1", eventId, eventSignature, eventCount, file1.Id, "bug" + eventCount.ToString());

                // Pick the next event id according to the requested id scheme.
                if (randomEventIds)
                {
                    eventId = rand.Next(100, 1320080390);
                }
                else
                {
                    if (incrementingEventId)
                    {
                        eventId++;
                    }
                    else
                    {
                        eventId--;
                    }
                }

                m_Index.AddEvent(product1, file1, event1);

                // Add the event info (hit) records; hit counts increase 1, 2, 3, ...
                StackHashEventInfoCollection eventInfos = new StackHashEventInfoCollection();
                int hitCount = 1;
                int totalHits = 0;
                for (int i = 0; i < numEventInfos; i++)
                {
                    // Truncate to minute granularity so stored and expected dates compare equal.
                    DateTime nowTime = DateTime.Now;
                    DateTime date = new DateTime(nowTime.Year, nowTime.Month, nowTime.Day, nowTime.Hour, nowTime.Minute, 0);
                    int localeId = 10;
                    if (!useSameLocale)
                    {
                        localeId += i;
                    }
                    totalHits += hitCount;
                    StackHashEventInfo eventInfo = new StackHashEventInfo(date.AddDays(i * 1), date.AddDays(i * 2), date.AddDays(i * 3), "US", localeId, "English", "Windows Vista" + i.ToString(), "1.2.3.4 build 7", hitCount++);
                    eventInfos.Add(eventInfo);
                }
                event1.TotalHits = totalHits;
                m_Index.AddEventInfoCollection(product1, file1, event1, eventInfos);

                // Add the cabs for this event.
                StackHashCabCollection cabs = new StackHashCabCollection();
                for (int i = 0; i < numCabs; i++)
                {
                    DateTime nowTime = DateTime.Now;
                    DateTime date = new DateTime(nowTime.Year, nowTime.Month, nowTime.Day, nowTime.Hour, nowTime.Minute, 0);
                    StackHashCab cab = new StackHashCab(date.AddDays(i * 1), date.AddDays(i * 2), event1.Id, event1.EventTypeName, "cab12345_23232.cab", cabId++, i * 2000);
                    cab.DumpAnalysis = new StackHashDumpAnalysis("2 days, 5 hours, 2 mins", "1 hour, 2 mins", "2.120.222.1121212", "Microsoft Windows Vista X64 6.0.212121212 (Build 2500)", "64 bit windows");
                    cab.CabDownloaded = false;
                    cabs.Add(cab);
                    m_Index.AddCab(product1, file1, event1, cab, true);
                }

                // Remember what was added so it can be compared against GetProductEvents later.
                allAddedEvents.Add(new StackHashEventPackage(eventInfos, new StackHashCabPackageCollection(cabs), event1, product1.Id));
            }
        }
        expectedProductEvents[product1.Id] = allAddedEvents;
    }

    // Verification: every product's event packages must match what was added exactly.
    foreach (StackHashProduct product in allProducts)
    {
        StackHashEventPackageCollection allEvents = m_Index.GetProductEvents(product);
        StackHashEventPackageCollection expectedEvents = expectedProductEvents[product.Id];

        Assert.AreNotEqual(null, allEvents);
        Assert.AreEqual(expectedEvents.Count, allEvents.Count);

        foreach (StackHashEventPackage package in allEvents)
        {
            StackHashEventPackage matchedEvent = expectedEvents.FindEventPackage(package.EventData.Id, package.EventData.EventTypeName);
            Assert.AreNotEqual(null, matchedEvent);
            Assert.AreEqual(0, package.EventInfoList.CompareTo(matchedEvent.EventInfoList));
            Assert.AreEqual(0, package.Cabs.CompareTo(matchedEvent.Cabs));
        }
    }
}
/// <summary>
/// An index will be created in sourceFolder\SourceIndex called SourceIndex.
/// It will then be moved to destFolder\DestIndex.
/// If defaultDatabaseLocation is specified then only the cab files will be moved and not the SQL database.
/// </summary>
/// <param name="settingsFolder">Folder for service settings, scripts and license file.</param>
/// <param name="sourceErrorIndexFolder">Folder in which the source index is created.</param>
/// <param name="sourceErrorIndexName">Database/index name of the source index.</param>
/// <param name="destErrorIndexFolder">Folder the index is moved to.</param>
/// <param name="destErrorIndexName">Database/index name after the move.</param>
/// <param name="defaultDatabaseLocation">True = database stays in the SQL default location; only cabs move.</param>
/// <param name="testIndexData">Sizing data for the generated test index.</param>
private void runMoveTask(String settingsFolder, String sourceErrorIndexFolder, String sourceErrorIndexName, String destErrorIndexFolder, String destErrorIndexName, bool defaultDatabaseLocation, StackHashTestIndexData testIndexData)
{
    String scriptFolder = settingsFolder + "\\Scripts";

    SqlCommands sqlCommands = new SqlCommands(m_ProviderFactory, s_MasterConnectionString, s_MasterConnectionString, 1);

    // Create the source database folders and settings.
    // Best-effort cleanup of any databases/folders left over from a previous run;
    // failures here are deliberately ignored.
    if (sqlCommands.DatabaseExists(destErrorIndexName))
    {
        try { sqlCommands.DeleteDatabase(destErrorIndexName); }
        catch {; }
    }
    if (sqlCommands.DatabaseExists(sourceErrorIndexName))
    {
        try { sqlCommands.DeleteDatabase(sourceErrorIndexName); }
        catch {; }
    }

    if (Directory.Exists(settingsFolder))
    {
        PathUtils.DeleteDirectory(settingsFolder, true);
    }
    if (Directory.Exists(destErrorIndexFolder))
    {
        PathUtils.DeleteDirectory(destErrorIndexFolder, true);
    }
    if (Directory.Exists(sourceErrorIndexFolder))
    {
        PathUtils.DeleteDirectory(sourceErrorIndexFolder, true);
    }

    // Recreate the folder structure needed for this run.
    if (!Directory.Exists(sourceErrorIndexFolder))
    {
        Directory.CreateDirectory(sourceErrorIndexFolder);
    }
    if (!Directory.Exists(settingsFolder))
    {
        Directory.CreateDirectory(settingsFolder);
    }
    if (!Directory.Exists(scriptFolder))
    {
        Directory.CreateDirectory(scriptFolder);
    }
    if (!Directory.Exists(destErrorIndexFolder))
    {
        Directory.CreateDirectory(destErrorIndexFolder);
    }

    try
    {
        // Create a settings manager and a new context.
        SettingsManager settingsManager = new SettingsManager(settingsFolder + "\\ServiceSettings.XML");
        StackHashContextSettings contextSettings = settingsManager.CreateNewContextSettings();

        // Point the context at the source index location.
        contextSettings.ErrorIndexSettings = new ErrorIndexSettings();
        contextSettings.ErrorIndexSettings.Folder = sourceErrorIndexFolder;
        contextSettings.ErrorIndexSettings.Name = sourceErrorIndexName;
        contextSettings.ErrorIndexSettings.Type = ErrorIndexType.SqlExpress;
        contextSettings.SqlSettings = StackHashSqlConfiguration.Default;
        contextSettings.SqlSettings.ConnectionString = s_ConnectionString;
        contextSettings.SqlSettings.InitialCatalog = sourceErrorIndexName;

        ScriptManager scriptManager = new ScriptManager(scriptFolder);

        string licenseFileName = string.Format("{0}\\License.bin", settingsFolder);
        LicenseManager licenseManager = new LicenseManager(licenseFileName, s_ServiceGuid);
        licenseManager.SetLicense(s_LicenseId);

        // Create a dummy controller to record the callbacks.
        BugTrackerManager bugTrackerManager = new BugTrackerManager(new String[0]);

        // Create a dummy controller to record the callbacks.
        ControllerContext controllerContext = new ControllerContext(contextSettings, scriptManager, new Windbg(), settingsManager, true, null, licenseManager);

        // Hook up to receive admin reports.
        controllerContext.AdminReports += new EventHandler<AdminReportEventArgs>(this.OnAdminReport);

        // Progress reports don't come through the controller context - they come straight through the contoller so create a dummy.
        Controller controller = new Controller();
        Reporter reporter = new Reporter(controller);
        controller.AdminReports += new EventHandler<AdminReportEventArgs>(this.OnAdminReport);

        // ******************************************
        // CREATE THE SOURCE INDEX
        // ******************************************

        // Delete any old index first.
        SqlConnection.ClearAllPools();
        try
        {
            controllerContext.DeleteIndex();
        }
        catch {; }

        // Activate the context and the associated index - this will create the index if necessary.
        controllerContext.Activate(null, defaultDatabaseLocation);

        // When the database lives in the default SQL location the source folder should hold no .mdf files.
        String[] databaseFiles = Directory.GetFiles(Path.Combine(sourceErrorIndexFolder, sourceErrorIndexName), "*.mdf");
        Assert.AreEqual(defaultDatabaseLocation, databaseFiles.Length == 0);

        controllerContext.CreateTestIndex(testIndexData);

        Guid guid = new Guid();
        StackHashClientData clientData = new StackHashClientData(guid, "GuidName", 1);

        // ******************************************
        // MOVE TO DESTINATION
        // ******************************************

        // Deactivate before the move.
        controllerContext.Deactivate();

        StackHashSqlConfiguration sqlConfig = new StackHashSqlConfiguration(s_ConnectionString, destErrorIndexName, 1, 100, 15, 100);

        // Move the index.
        controllerContext.RunMoveIndexTask(clientData, destErrorIndexFolder, destErrorIndexName, sqlConfig);

        // Wait for the move task to complete.
        waitForMoveCompleted(60000 * 20);

        // Expect exactly a Started report followed by a clean Completed report.
        Assert.AreEqual(2, m_AdminReports.Count);
        Assert.AreEqual(null, m_AdminReports[0].Report.LastException);
        Assert.AreEqual(0, m_AdminReports[0].Report.ContextId);
        Assert.AreEqual(StackHashAdminOperation.ErrorIndexMoveStarted, m_AdminReports[0].Report.Operation);
        Assert.AreEqual(0, m_AdminReports[1].Report.ContextId);
        Assert.AreEqual(StackHashAdminOperation.ErrorIndexMoveCompleted, m_AdminReports[1].Report.Operation);
        Assert.AreEqual(null, m_AdminReports[1].Report.LastException);
        Assert.AreEqual(StackHashServiceErrorCode.NoError, m_AdminReports[1].Report.ServiceErrorCode);

        // Progress reports are only expected when cabs exist and the move crosses drives
        // (same-drive moves are renames - NOTE(review): presumed rationale, confirm).
        if ((testIndexData.NumberOfCabs > 0) && (sourceErrorIndexFolder[0] != destErrorIndexFolder[0]))
        {
            Assert.AreEqual(true, m_MoveAdminReports.Count > 0);
        }

        controllerContext.AdminReports -= new EventHandler<AdminReportEventArgs>(this.OnAdminReport);

        // Open the index at its new location and make sure it is usable.
        ErrorIndexSettings destIndexData = new ErrorIndexSettings()
        {
            Folder = destErrorIndexFolder,
            Name = destErrorIndexName,
            Type = ErrorIndexType.SqlExpress
        };

        IErrorIndex index1 = getIndex(destIndexData, sqlConfig);

        try
        {
            index1.Activate();

            // Make a single call just to ensure the database is still in tact.
            StackHashProductCollection products = index1.LoadProductList();
            Assert.AreEqual(testIndexData.NumberOfProducts, products.Count);
        }
        finally
        {
            index1.Deactivate();
            index1.Dispose();
            SqlConnection.ClearAllPools();
        }
    }
    finally
    {
        // Best-effort teardown: drop both databases and remove all folders created above.
        SqlConnection.ClearAllPools();

        if (sqlCommands.DatabaseExists(destErrorIndexName))
        {
            try { sqlCommands.DeleteDatabase(destErrorIndexName); }
            catch {; }
        }
        if (sqlCommands.DatabaseExists(sourceErrorIndexName))
        {
            try { sqlCommands.DeleteDatabase(sourceErrorIndexName); }
            catch {; }
        }

        SqlConnection.ClearAllPools();

        if (Directory.Exists(sourceErrorIndexFolder))
        {
            PathUtils.SetFilesWritable(sourceErrorIndexFolder, true);
            PathUtils.DeleteDirectory(sourceErrorIndexFolder, true);
        }
        if (Directory.Exists(destErrorIndexFolder))
        {
            PathUtils.SetFilesWritable(destErrorIndexFolder, true);
            PathUtils.DeleteDirectory(destErrorIndexFolder, true);
        }
        if (Directory.Exists(settingsFolder))
        {
            PathUtils.SetFilesWritable(settingsFolder, true);
            PathUtils.DeleteDirectory(settingsFolder, true);
        }
    }
}
/// <summary>
/// Task entry point: copies every product (plus its per-product sync/hit control
/// times, files via copyFiles, and index statistics) from the source index to the
/// destination index. Progress is reported as a percentage of total stored events.
/// Any exception is recorded in LastException rather than propagated.
/// </summary>
public override void EntryPoint()
{
    m_SourceIndex = m_TaskParameters.SourceIndex;
    m_DestinationIndex = m_TaskParameters.DestinationIndex;

    // Record whether the index is active or not so it can be reset at the end of the task.
    bool isSourceIndexActive = m_SourceIndex.IsActive;

    try
    {
        SetTaskStarted(m_TaskParameters.ErrorIndex);

        // Don't allow the computer to sleep when this task is running.
        StackHashUtilities.SystemInformation.DisableSleep();

        // Make sure the source and destination indexes are activated.
        m_SourceIndex.Activate();
        m_DestinationIndex.Activate();

        // The % complete progress reports are sent based on the number of events copied so far.
        // Therefore, get the total events in the source index so this % can be calculated.
        m_TotalEvents = m_SourceIndex.TotalStoredEvents;

        // Get a list of all products in the source index.
        StackHashProductCollection products = m_SourceIndex.LoadProductList();

        // Copy each product.
        foreach (StackHashProduct product in products)
        {
            // Abort is checked once per product, before any copying for that product.
            if (this.CurrentTaskState.AbortRequested)
            {
                throw new StackHashException("Index product copy aborted", StackHashServiceErrorCode.Aborted);
            }

            // Add the product if it doesn't already exist.
            if (!m_DestinationIndex.ProductExists(product))
            {
                m_DestinationIndex.AddProduct(product, true); // true = update all fields including non-winqual fields.
            }

            // Copy the per-product control information (last hit / sync times).
            m_DestinationIndex.SetLastHitTimeLocal(product.Id, m_SourceIndex.GetLastHitTimeLocal(product.Id));
            m_DestinationIndex.SetLastSyncCompletedTimeLocal(product.Id, m_SourceIndex.GetLastSyncCompletedTimeLocal(product.Id));
            m_DestinationIndex.SetLastSyncStartedTimeLocal(product.Id, m_SourceIndex.GetLastSyncStartedTimeLocal(product.Id));
            m_DestinationIndex.SetLastSyncTimeLocal(product.Id, m_SourceIndex.GetLastSyncTimeLocal(product.Id));

            // Copy the files (and their events/cabs) belonging to this product.
            copyFiles(product);
        }

        // Copy the general control information.
        copyStatistics(m_SourceIndex, m_DestinationIndex);
    }
    catch (Exception ex)
    {
        // Task exceptions are recorded (and surfaced via LastException) rather than thrown;
        // this includes the deliberate abort exception above.
        LastException = ex;
    }
    finally
    {
        // Note that the context controller will check if the index needs switching so this does not need
        // to be done here.
        try
        {
            // Set the source index back to its initial state (probably deactivated) but
            // don't dispose of it as it is the designated context index at present.
            if (!isSourceIndexActive)
            {
                m_SourceIndex.Deactivate();
            }
        }
        catch {}

        // The destination index is owned by this task, so shut it down completely.
        m_DestinationIndex.Deactivate();
        m_DestinationIndex.Dispose();

        StackHashUtilities.SystemInformation.EnableSleep();
        SetTaskCompleted(m_TaskParameters.ErrorIndex);
    }
}
/// <summary>
/// Runs the analyze task over an index containing one cab, with an EMPTY
/// ProductsToSynchronize collection (the product is not enabled). Expects the
/// auto script NOT to run, so all dump analysis fields remain null.
/// </summary>
public void AutoScriptOnOneCabProductNotEnabled()
{
    // Create an index with 1 cab file.
    XmlErrorIndex errorIndex = new XmlErrorIndex(m_TempPath, "ErrorIndex");
    errorIndex.Activate();

    StackHashTestIndexData testData = new StackHashTestIndexData();
    testData.NumberOfProducts = 1;
    testData.NumberOfFiles = 1;
    testData.NumberOfEvents = 1;
    testData.NumberOfEventInfos = 1;
    testData.NumberOfCabs = 1;
    testData.UseLargeCab = false;
    TestManager.CreateTestIndex(errorIndex, testData);

    ScriptManager scriptManager = new ScriptManager(m_TempPath + "Scripts");
    // Only the built-in auto scripts should be present.
    Assert.AreEqual(scriptManager.NumberOfAutoScripts, scriptManager.ScriptNames.Count);

    ScriptResultsManager scriptResultsManager = new ScriptResultsManager(errorIndex, scriptManager, m_Debugger, m_DebuggerSettings);

    // Set up parameters for the task.
    AnalyzeTaskParameters analyzeParams = new AnalyzeTaskParameters();

    // Standard task parameters.
    analyzeParams.IsBackgroundTask = true;
    analyzeParams.Name = "TestRunOneTask";
    analyzeParams.RunInParallel = false;
    analyzeParams.UseSeparateThread = true;
    analyzeParams.AnalysisSettings = new StackHashAnalysisSettings();
    analyzeParams.AnalysisSettings.ForceRerun = true;
    analyzeParams.ContextId = 0;
    analyzeParams.ClientData = new StackHashClientData(Guid.NewGuid(), "MarkJ", 1);
    analyzeParams.Debugger = m_Debugger;
    analyzeParams.DebuggerSettings = m_DebuggerSettings;
    analyzeParams.TheScriptManager = scriptManager;
    analyzeParams.TheScriptResultsManager = scriptResultsManager;
    // Deliberately empty: no products are enabled for synchronization.
    analyzeParams.ProductsToSynchronize = new StackHashProductSyncDataCollection();
    analyzeParams.ErrorIndex = errorIndex;

    // Create the task and run it.
    AnalyzeTask analyzeTask = new AnalyzeTask(analyzeParams);
    TaskManager taskManager = new TaskManager("Test");
    taskManager.Enqueue(analyzeTask);
    taskManager.WaitForTaskCompletion(analyzeTask, s_TaskTimeout);
    Assert.AreEqual(true, analyzeTask.CurrentTaskState.TaskCompleted);

    // Navigate down to the single cab in the index.
    StackHashProductCollection products = errorIndex.LoadProductList();
    StackHashFileCollection files = errorIndex.LoadFileList(products[0]);
    StackHashEventCollection events = errorIndex.LoadEventList(products[0], files[0]);
    StackHashCabCollection cabs = errorIndex.LoadCabList(products[0], files[0], events[0]);

    // The script must not have run, so no dump analysis data should be present.
    StackHashDumpAnalysis analysis = cabs[0].DumpAnalysis;
    Assert.AreEqual(null, analysis.DotNetVersion);
    Assert.AreEqual(null, analysis.MachineArchitecture);
    Assert.AreEqual(null, analysis.OSVersion);
    Assert.AreEqual(null, analysis.ProcessUpTime);
    Assert.AreEqual(null, analysis.SystemUpTime);
}
/// <summary>
/// Runs the analyze task once, then bumps the AutoScript's LastModifiedDate and runs
/// the task again. Because the script version is newer than the existing results,
/// the script should be re-run (a later RunDate on the second result proves it).
/// </summary>
public void AutoScriptOnOneCabAlreadyRunButNewerVersion()
{
    // If the auto task sees the AutoScript.log file exists and has the same
    // version as the current AutoScript.xml then it shouldn't run it again.
    // Determine this by checking the file time after a second run.

    // Create an index with 1 cab file.
    XmlErrorIndex errorIndex = new XmlErrorIndex(m_TempPath, "ErrorIndex");
    errorIndex.Activate();

    StackHashTestIndexData testData = new StackHashTestIndexData();
    testData.NumberOfProducts = 1;
    testData.NumberOfFiles = 1;
    testData.NumberOfEvents = 1;
    testData.NumberOfEventInfos = 1;
    testData.NumberOfCabs = 1;
    testData.UseLargeCab = false;
    TestManager.CreateTestIndex(errorIndex, testData);

    ScriptManager scriptManager = new ScriptManager(m_TempPath + "Scripts");
    Assert.AreEqual(scriptManager.NumberOfAutoScripts, scriptManager.ScriptNames.Count);

    ScriptResultsManager scriptResultsManager = new ScriptResultsManager(errorIndex, scriptManager, m_Debugger, m_DebuggerSettings);

    // Set up parameters for the task.
    AnalyzeTaskParameters analyzeParams = new AnalyzeTaskParameters();

    // Standard task parameters.
    analyzeParams.IsBackgroundTask = true;
    analyzeParams.Name = "TestRunOneTask";
    analyzeParams.RunInParallel = false;
    analyzeParams.UseSeparateThread = true;
    analyzeParams.AnalysisSettings = new StackHashAnalysisSettings();
    analyzeParams.AnalysisSettings.ForceRerun = true;
    analyzeParams.ContextId = 0;
    analyzeParams.ClientData = new StackHashClientData(Guid.NewGuid(), "MarkJ", 1);
    analyzeParams.Debugger = m_Debugger;
    analyzeParams.DebuggerSettings = m_DebuggerSettings;
    analyzeParams.TheScriptManager = scriptManager;
    analyzeParams.TheScriptResultsManager = scriptResultsManager;

    // This time the product IS enabled for synchronization.
    int productId = 1;
    analyzeParams.ProductsToSynchronize = new StackHashProductSyncDataCollection();
    analyzeParams.ProductsToSynchronize.Add(new StackHashProductSyncData(productId));
    analyzeParams.ErrorIndex = errorIndex;

    // Create the task and run it.
    AnalyzeTask analyzeTask = new AnalyzeTask(analyzeParams);
    TaskManager taskManager = new TaskManager("Test");
    taskManager.Enqueue(analyzeTask);
    taskManager.WaitForTaskCompletion(analyzeTask, s_TaskTimeout);

    // Capture the first run's results for later comparison.
    StackHashProductCollection products = errorIndex.LoadProductList();
    StackHashFileCollection files = errorIndex.LoadFileList(products[0]);
    StackHashEventCollection events = errorIndex.LoadEventList(products[0], files[0]);
    StackHashCabCollection cabs = errorIndex.LoadCabList(products[0], files[0], events[0]);
    StackHashScriptResult script1 = scriptResultsManager.GetResultFileData(products[0], files[0], events[0], cabs[0], "AutoScript");

    // Wait for 1 second - so file time granularity exceeded.
    Thread.Sleep(1000);

    // Change the version on the autoscript.
    StackHashScriptSettings settings = scriptManager.LoadScript("AutoScript");
    settings.LastModifiedDate = DateTime.Now;
    scriptManager.AddScript(settings, true, true);

    // Now run the task again.
    analyzeTask = new AnalyzeTask(analyzeParams);
    taskManager.Enqueue(analyzeTask);
    taskManager.WaitForTaskCompletion(analyzeTask, s_TaskTimeout);
    Assert.AreEqual(true, analyzeTask.CurrentTaskState.TaskCompleted);

    // Refresh the cab list data.
    cabs = errorIndex.LoadCabList(products[0], files[0], events[0]);

    // The script ran, so the dump analysis fields should now be populated.
    StackHashDumpAnalysis analysis = cabs[0].DumpAnalysis;
    Assert.AreEqual("not available", analysis.SystemUpTime);
    Assert.AreEqual("0 days 0:00:15.000", analysis.ProcessUpTime);
    Assert.AreEqual("2.0.50727.3603", analysis.DotNetVersion);

    // The second result must be newer than the first, proving a re-run occurred.
    StackHashScriptResult script2 = scriptResultsManager.GetResultFileData(products[0], files[0], events[0], cabs[0], "AutoScript");
    Assert.AreEqual(true, script2.RunDate > script1.RunDate);
}
/// <summary> /// Parses the database looking for cabs to unwrap. /// </summary> private void analyzeAllCabs() { IErrorIndex index = m_TaskParameters.ErrorIndex; // Get a list of products. StackHashProductCollection products = index.LoadProductList(); foreach (StackHashProduct product in products) { if (CurrentTaskState.AbortRequested) { throw new OperationCanceledException("Task aborted"); } if (!shouldProcessProduct(product)) { continue; } // Get the file list. StackHashFileCollection files = index.LoadFileList(product); foreach (StackHashFile file in files) { if (CurrentTaskState.AbortRequested) { throw new OperationCanceledException("Task aborted"); } if (!shouldProcessFile(product, file)) { continue; } // Get the event data. StackHashEventCollection events = index.LoadEventList(product, file); foreach (StackHashEvent theEvent in events) { if (CurrentTaskState.AbortRequested) { throw new OperationCanceledException("Task aborted"); } if (!shouldProcessEvent(product, file, theEvent)) { continue; } StackHashCabCollection cabs = index.LoadCabList(product, file, theEvent); // Process each cab. foreach (StackHashCab cab in cabs) { if (CurrentTaskState.AbortRequested) { throw new OperationCanceledException("Task aborted"); } if (!shouldProcessCab(product, file, theEvent, cab)) { continue; } processCab(product, file, theEvent, cab); } } } } }
/// <summary>
/// Task entry point for the purge task. Walks every product/file in the
/// index and purges cabs older than the configured purge age via a
/// per-event parser callback. Exceptions are logged and stored in
/// LastException rather than propagated; sleep is re-enabled and the
/// task marked completed in all cases.
/// </summary>
public override void EntryPoint()
{
    try
    {
        IErrorIndex errorIndex = m_TaskParameters.ErrorIndex;
        SetTaskStarted(m_TaskParameters.ErrorIndex);

        // Stop the machine sleeping while the (potentially long) purge runs.
        // Balanced by EnableSleep in the outer finally.
        StackHashUtilities.SystemInformation.DisableSleep();

        // processEvent is invoked once per matching event; unsubscribed in
        // the inner finally so the parser never outlives this run.
        ErrorIndexEventParser parser = new ErrorIndexEventParser();
        parser.ParseEvent += new EventHandler<ErrorIndexParseEventsEventArgs>(this.processEvent);

        // Cabs created on or before this UTC date are eligible for purging.
        DateTime purgeDate = DateTime.Now.ToUniversalTime().AddDays(-1 * m_TaskParameters.PurgeOptions.FindMostRecentPurgeAge());

        try
        {
            // Get the list of products.
            StackHashProductCollection products = errorIndex.LoadProductList();

            foreach (StackHashProduct product in products)
            {
                if (this.CurrentTaskState.AbortRequested)
                {
                    throw new OperationCanceledException("Purging product");
                }

                // Get the files associated with this product.
                StackHashFileCollection files = errorIndex.LoadFileList(product);

                foreach (StackHashFile file in files)
                {
                    if (this.CurrentTaskState.AbortRequested)
                    {
                        throw new OperationCanceledException("Purging file");
                    }

                    // Now parse the events one at a time. Instead of getting a list of all the events - use a
                    // callback to analyze each event. Note this allows for an abort by returning false
                    // from the callback.
                    parser.Product = product;
                    parser.File = file;

                    // Select only downloaded, not-yet-purged cabs for this
                    // product/file that are old enough to purge.
                    parser.SearchCriteriaCollection = new StackHashSearchCriteriaCollection()
                    {
                        new StackHashSearchCriteria(
                            new StackHashSearchOptionCollection()
                            {
                                new IntSearchOption(StackHashObjectType.Product, "Id", StackHashSearchOptionType.Equal, product.Id, 0),
                                new IntSearchOption(StackHashObjectType.File, "Id", StackHashSearchOptionType.Equal, file.Id, 0),
                                new DateTimeSearchOption(StackHashObjectType.CabInfo, "DateCreatedLocal", StackHashSearchOptionType.LessThanOrEqual, purgeDate, purgeDate),
                                new IntSearchOption(StackHashObjectType.CabInfo, "Purged", StackHashSearchOptionType.Equal, 0, 0), // 0 is false.
                                new IntSearchOption(StackHashObjectType.CabInfo, "CabDownloaded", StackHashSearchOptionType.Equal, 1, 0), // 1 is true.
                            })
                    };

                    // ParseEvents returns false when the callback requested an abort.
                    if (!errorIndex.ParseEvents(product, file, parser))
                    {
                        throw new OperationCanceledException("Aborted while purging events");
                    }
                }
            }
        }
        finally
        {
            parser.ParseEvent -= new EventHandler<ErrorIndexParseEventsEventArgs>(this.processEvent);
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): a deliberate abort (OperationCanceledException) is also
        // caught here and reported as a failure at Information severity —
        // presumably intentional for this task framework; confirm.
        DiagnosticsHelper.LogException(DiagSeverity.Information, "Purge task failed", ex);
        LastException = ex;
    }
    finally
    {
        StackHashUtilities.SystemInformation.EnableSleep();
        SetTaskCompleted(m_TaskParameters.ErrorIndex);
    }
}