public void CollectData()
{
    logger = new CollectorLogger(SourceServerInstance, CollectionSetUid, ItemId);

    if (verbose) { logger.logMessage("--------------------------------"); }
    if (verbose) { logger.logMessage("   ExtendedXEReaderCollector    "); }
    if (verbose) { logger.logMessage("--------------------------------"); }
    if (verbose) { logger.logMessage("Copyright© sqlconsulting.it 2014"); }
    if (verbose) { logger.logMessage("-"); }
    if (verbose) { logger.logMessage("Loading configuration"); }

    //
    // Load Configuration
    //
    cfg = new XEReaderCollectorConfig();
    cfg.readFromDatabase(SourceServerInstance, CollectionSetUid, ItemId);

    String connectionString = String.Format(@"Data Source = {0}; Initial Catalog = master; Integrated Security = SSPI", SourceServerInstance);

    collectorThread = Thread.CurrentThread;
    Task.Factory.StartNew(() => checkCollectionSetEnabled());

    if (verbose) { logger.logMessage("Entering collection items loop"); }

    foreach (CollectionItemConfig item in cfg.collectionItems)
    {
        XEReaderCollectionItemConfig itm = (XEReaderCollectionItemConfig)item;

        if (verbose) { logger.logMessage("Processing item n. " + itm.Index); }
        if (verbose) { logger.logMessage("Processing session " + itm.SessionDefinition); }

        var dataQueue = new ConcurrentQueue<DataTable>();
        DateTime lastEventFlush = new DateTime(1900, 1, 1);

        CheckSession(itm);

        // Start the consumer task that drains the queue and persists the collected data
        Task.Factory.StartNew(() => PerformWrite(dataQueue, itm));

        // Attach to an existing Extended Events session and stream its events
        Microsoft.SqlServer.XEvent.Linq.QueryableXEventData events = new QueryableXEventData(
            connectionString,
            itm.SessionName,
            EventStreamSourceOptions.EventStream,
            EventStreamCacheOptions.DoNotCache);

        foreach (PublishedEvent evt in events)
        {
            try
            {
                DataTable dt = ReadEvent(evt);

                //
                // Apply filter
                //
                DataView dw = new DataView(dt);
                dw.RowFilter = itm.Filter;
                dt = dw.ToTable();

                //
                // Enqueue the collected data for the consumer thread
                //
                if (dt != null && dt.Rows.Count > 0)
                {
                    dataQueue.Enqueue(dt);
                }

                //
                // Process rows to fire alerts if needed
                //
                foreach (AlertConfig currentAlert in itm.Alerts)
                {
                    foreach (DataRow currentRow in dt.Select(currentAlert.Filter))
                    {
                        //TODO: Process alerts
                        ProcessAlert(currentAlert, currentRow);
                    }
                }
            }
            catch (Exception e)
            {
                // Capture the session-related exceptions
                logger.logMessage(e.StackTrace);

                // Try restarting the session event stream
                try
                {
                    events = new QueryableXEventData(
                        connectionString,
                        itm.SessionName,
                        EventStreamSourceOptions.EventStream,
                        EventStreamCacheOptions.DoNotCache);
                }
                catch (Exception ex)
                {
                    // Unable to restart the event stream: log and rethrow,
                    // preserving the original stack trace
                    logger.logMessage(ex.StackTrace);
                    throw;
                }
            }
        }
    }

    logger.cleanupLogFiles(cfg.DaysUntilExpiration);
}
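// Illustrative sketch, not part of the original source: PerformWrite (started above as a
// consumer task) is expected to drain the ConcurrentQueue<DataTable> filled by the event
// loop. A minimal consumer loop could look like the sketch below; the helper name
// WriteCacheFileSketch and the 1-second polling interval are assumptions, not the
// project's actual implementation.
private void PerformWriteSketch(ConcurrentQueue<DataTable> dataQueue, XEReaderCollectionItemConfig itm)
{
    while (true)
    {
        DataTable dt;
        while (dataQueue.TryDequeue(out dt))
        {
            // Placeholder: persist or merge the dequeued batch for the upload step
            WriteCacheFileSketch(dt, itm);
        }
        Thread.Sleep(1000); // poll the queue roughly once per second
    }
}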
public void CollectData(
        String SourceServerInstance,
        Guid CollectionSetUid,
        int ItemId
    )
{
    CollectorLogger logger = new CollectorLogger(SourceServerInstance, CollectionSetUid, ItemId);
    DataTable collectedData = null;

    if (verbose) { logger.logMessage("--------------------------------"); }
    if (verbose) { logger.logMessage("     ExtendedTSQLCollector      "); }
    if (verbose) { logger.logMessage("--------------------------------"); }
    if (verbose) { logger.logMessage("Copyright© sqlconsulting.it 2014"); }
    if (verbose) { logger.logMessage("-"); }
    if (verbose) { logger.logMessage("Loading configuration"); }

    //
    // Load Configuration
    //
    TSQLCollectorConfig cfg = new TSQLCollectorConfig();
    cfg.readFromDatabase(SourceServerInstance, CollectionSetUid, ItemId);

    if (verbose) { logger.logMessage("Entering collection items loop"); }

    foreach (CollectionItemConfig item in cfg.collectionItems)
    {
        TSQLCollectionItemConfig itm = (TSQLCollectionItemConfig)item;

        if (verbose) { logger.logMessage("Processing item n. " + itm.Index); }
        if (verbose) { logger.logMessage("Processing query " + itm.Query); }

        collectedData = null;

        String ts = DateTime.Now.ToString("yyyyMMddHHmmss");
        String collectorId = CollectorUtils.getCacheFilePrefix(SourceServerInstance, CollectionSetUid, ItemId) + "_" + itm.Index;
        String destFile = Path.Combine(cfg.CacheDirectory, collectorId + "_" + ts + ".cache");

        //
        // Iterate through the enabled databases
        //
        if (verbose) { logger.logMessage("Entering databases loop"); }

        foreach (String currentDatabase in cfg.Databases)
        {
            if (verbose) { logger.logMessage("Processing database " + currentDatabase); }

            //
            // Execute the query in the collection item
            //
            DataTable dt = CollectorUtils.GetDataTable(SourceServerInstance, currentDatabase, itm.Query);

            //
            // Add computed columns
            //
            if (dt.Columns.Contains("database_name"))
            {
                dt.Columns["database_name"].ColumnName = "__database_name";
            }
            DataColumn cl_db = new DataColumn("database_name", typeof(String));
            cl_db.DefaultValue = currentDatabase;
            dt.Columns.Add(cl_db);

            if (dt.Columns.Contains("collection_time"))
            {
                dt.Columns["collection_time"].ColumnName = "__collection_time";
            }
            DataColumn cl_dt = new DataColumn("collection_time", typeof(DateTime));
            cl_dt.DefaultValue = DateTime.Now;
            dt.Columns.Add(cl_dt);

            //
            // Merge collected data in a single DataTable
            //
            if (collectedData != null)
            {
                collectedData.Merge(dt);
            }
            else
            {
                collectedData = dt;
                collectedData.DataSet.RemotingFormat = System.Data.SerializationFormat.Binary;
                collectedData.RemotingFormat = System.Data.SerializationFormat.Binary;
            }
        }

        if (verbose) { logger.logMessage("Saving to cache file " + destFile); }

        //
        // Save data to a binary cache file
        //
        if (File.Exists(destFile))
        {
            File.Delete(destFile);
        }

        System.Runtime.Serialization.Formatters.Binary.BinaryFormatter fm =
            new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();

        using (FileStream fs = new FileStream(destFile, FileMode.CreateNew))
        {
            fm.Serialize(fs, collectedData);
            fs.Close();
        }

        if (verbose) { logger.logMessage("File saved successfully"); }
    }

    logger.cleanupLogFiles(cfg.DaysUntilExpiration);
}
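// Illustrative sketch, not part of the original source: the binary .cache file written above
// can be read back by deserializing with the same BinaryFormatter. The method name
// ReadCacheFileSketch is hypothetical and only mirrors the serialization step in CollectData.
private static DataTable ReadCacheFileSketch(String cacheFile)
{
    System.Runtime.Serialization.Formatters.Binary.BinaryFormatter fm =
        new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
    using (FileStream fs = new FileStream(cacheFile, FileMode.Open))
    {
        // Deserialize returns the DataTable persisted by CollectData
        return (DataTable)fm.Deserialize(fs);
    }
}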