/// <summary>
/// Parses the XEL input file named in <paramref name="o"/> and exports every event
/// to the output file in the requested format (txt, html, or json; json is the default).
/// </summary>
/// <param name="o">Parsed command-line options: input/output paths and export format.</param>
private static void Exec(Options o)
{
    Console.WriteLine($"XEL Console");
    if (!File.Exists(o.InputFileName))
    {
        Console.WriteLine($"ERROR: Input file {o.InputFileName} not exists");
        return;
    }

    Console.WriteLine($"Parsing {o.InputFileName} ...");

    var outDir = Path.GetDirectoryName(o.OutputFileName);
    // BUG FIX: Path.GetDirectoryName returns "" for a bare file name (no directory part),
    // and Directory.Exists("") is false, which used to reject valid relative output paths.
    // An empty directory component means "current directory" and is always acceptable.
    if (!string.IsNullOrEmpty(outDir) && !Directory.Exists(outDir))
    {
        Console.WriteLine($"ERROR: Output directory {outDir} not exists");
        return;
    }

    using (var outputStream = File.CreateText(o.OutputFileName))
    {
        WriteFileStart(o, outputStream);

        var xelStream = new XEFileEventStreamer(o.InputFileName);
        xelStream.ReadEventStream(
            xevent =>
            {
                switch (o.Format)
                {
                    case ExportFormat.txt:
                        ExportTxt(xevent, outputStream);
                        break;
                    case ExportFormat.html:
                        ExportHtml(xevent, outputStream);
                        break;
                    case ExportFormat.json:
                    default:
                        ExportJson(xevent, outputStream);
                        break;
                }
                return Task.CompletedTask;
            },
            // GetAwaiter().GetResult() instead of Wait() so a failure surfaces as the
            // original exception rather than an AggregateException.
            CancellationToken.None).GetAwaiter().GetResult();

        WriteFileEnd(o, outputStream);
    }

    Console.WriteLine($"Exported as {o.Format} to {o.OutputFileName}");
}
/// <summary>
/// Counts the extended events contained in every log file that has data.
/// </summary>
/// <param name="cancellationToken">Token used to cancel the read of each file.</param>
/// <returns>The total number of events found across all files.</returns>
public async Task<long> Count(CancellationToken cancellationToken)
{
    long eventCount = 0;
    foreach (var logFile in _logFilesWithData)
    {
        // FileShare.ReadWrite so a writer that still holds the log open does not block us.
        using (Stream logFileStream = new FileStream(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite, 81920, false))
        {
            XEFileEventStreamer xeReader = new XEFileEventStreamer(logFileStream);
            // BUG FIX: the caller's cancellationToken was previously ignored
            // (CancellationToken.None was passed), making Count uncancellable.
            await xeReader.ReadEventStream((eventData) =>
            {
                eventCount++;
                return Task.CompletedTask;
            }, cancellationToken);
        }
    }

    return eventCount;
}
/// <summary>
/// Streams every event in the XEL file at InputFilePath and writes each event to the
/// console followed by a blank line. Silently does nothing if the file is missing.
/// </summary>
public void Process()
{
    if (!File.Exists(InputFilePath))
    {
        return;
    }

    string inputFileName = Path.GetFileName(InputFilePath);
    Console.WriteLine($"{inputFileName}");

    // NOTE: the previous version computed Path.GetExtension into an unused local;
    // that dead code has been removed.
    var xeStream = new XEFileEventStreamer(InputFilePath);
    xeStream.ReadEventStream(
        xevent =>
        {
            Console.WriteLine(xevent);
            Console.WriteLine("");
            return Task.CompletedTask;
        },
        // GetAwaiter().GetResult() surfaces the original exception instead of
        // Wait()'s AggregateException wrapper.
        CancellationToken.None).GetAwaiter().GetResult();
}
/// <summary>
/// Scans the audit blob container for .xel files belonging to the configured
/// server/database within [startDate, endDate], downloads each match to a temp file,
/// parses its events, and posts them to the log endpoint as JSON (split when the
/// payload exceeds the ingestion size limit).
/// </summary>
static async Task Main(string[] args)
{
    // Create a BlobServiceClient object which will be used to create a container client
    BlobServiceClient blobServiceClient = new BlobServiceClient(connectionString);
    BlobContainerClient containerClient = blobServiceClient.GetBlobContainerClient(containerName);

    await foreach (BlobItem blobItem in containerClient.GetBlobsAsync())
    {
        // Structure based on servername/databasename/audit_name/date/filename.xel
        var blobNameSplit = blobItem.Name.Split("/");
        // NOTE(review): assumes the date segment parses under the current culture
        // (e.g. ISO "yyyy-MM-dd") — confirm the audit path format.
        var blobDate = DateTime.Parse(blobNameSplit[3]);
        if (
            blobNameSplit[0] == serverName &&
            blobNameSplit[1] == databaseName &&
            blobItem.Name.EndsWith(".xel") &&
            blobDate >= startDate &&
            blobDate <= endDate
            )
        {
            Console.WriteLine("\t" + blobItem.Name);

            //Assuming we meet all the filtering criteria, parse and output to log analytics
            BlobClient blobClient = containerClient.GetBlobClient(blobItem.Name);
            BlobDownloadInfo download = await blobClient.DownloadAsync();
            string downloadFilePath = "holdinglog.xel";
            using (FileStream downloadFileStream = File.OpenWrite(downloadFilePath))
            {
                // The explicit Close() is unnecessary: the using block disposes the stream.
                await download.Content.CopyToAsync(downloadFileStream);
            }

            List<IReadOnlyDictionary<string, object>> events = new List<IReadOnlyDictionary<string, object>>();
            XEFileEventStreamer xelreader = new XEFileEventStreamer(downloadFilePath);
            // BUG FIX: previously blocked on .Wait() inside an async method; await instead
            // to avoid blocking a thread-pool thread and AggregateException wrapping.
            await xelreader.ReadEventStream(
                () => Task.CompletedTask,
                xevent =>
                {
                    events.Add(xevent.Fields);
                    return Task.CompletedTask;
                },
                CancellationToken.None);

            string json = JsonSerializer.Serialize(events);
            var jsonBytesSize = Encoding.UTF8.GetBytes(json).Length;
            // The ingestion API caps a single post at ~30 MB, so split larger payloads.
            if (jsonBytesSize > 30000000)
            {
                splitAndPost(events);
            }
            else
            {
                post(json);
            }

            File.Delete(downloadFilePath);
        }
    }
}
/// <summary>
/// Streams the given XEL trace files and reconstructs, per session, the ordered list
/// of replayable events (RPC calls, user transactions, and bulk inserts) captured
/// against the database of the supplied connection.
/// </summary>
/// <param name="fileNames">Paths of the .xel files to read.</param>
/// <param name="connectionString">Connection whose Database property filters events; also used to load RPC parameter metadata.</param>
/// <param name="cutoff">If set, events with a later timestamp are ignored.</param>
/// <returns>A Run containing the non-empty sessions, ordered by first event.</returns>
internal async Task<Run> PreProcess(string[] fileNames, string connectionString, DateTimeOffset? cutoff)
{
    var sessions = new ConcurrentDictionary<string, Session>();
    using (var con = new SqlConnection(connectionString))
    {
        await con.OpenAsync();
        foreach (string fileName in fileNames)
        {
            var xeStream = new XEFileEventStreamer(fileName);
            await xeStream.ReadEventStream(xevent =>
            {
                // Only replay events captured against the target database, within the cutoff.
                if (xevent.Actions["database_name"].ToString() != con.Database)
                {
                    return Task.CompletedTask;
                }

                if (cutoff != null && xevent.Timestamp > cutoff)
                {
                    return Task.CompletedTask;
                }

                Event evt = null;
                // rpc_starting: a stored-procedure call, excluding connection-pool resets
                // and sp_unprepare housekeeping.
                if (xevent.Name == "rpc_starting" &&
                    xevent.Fields["object_name"].ToString() != "sp_reset_connection" &&
                    !xevent.Fields["statement"].ToString().StartsWith("exec sp_unprepare "))
                {
                    evt = new Rpc()
                    {
                        EventSequence = xevent.Actions["event_sequence"].ToString(),
                        TransactionId = xevent.Actions["transaction_id"].ToString(),
                        Statement = xevent.Fields["statement"].ToString(),
                        ObjectName = xevent.Fields["object_name"].ToString(),
                        Timestamp = xevent.Timestamp
                    };

                    //Build parameters so we can replay statements as ADO.NET CommandType.StoredProcedure calls in order to avoid extra compilations of raw statement
                    LoadParameters(con, (Rpc)evt);
                }
                else if (xevent.Name == "sql_transaction")
                {
                    // Only user transactions matter for replay.
                    if (xevent.Fields["transaction_type"].ToString() == "User")
                    {
                        evt = new Transaction()
                        {
                            EventSequence = xevent.Actions["event_sequence"].ToString(),
                            TransactionId = xevent.Fields["transaction_id"].ToString(),
                            TransactionState = xevent.Fields["transaction_state"].ToString(),
                            Timestamp = xevent.Timestamp
                        };
                    }
                }
                else if (xevent.Name == "sql_batch_starting" &&
                         xevent.Fields["batch_text"].ToString().Contains("insert bulk"))
                {
                    // Batch text shape: "insert bulk <table> (<col> <type>, ...) [with (<options>)]"
                    var bulkInsert = new BulkInsert()
                    {
                        EventSequence = xevent.Actions["event_sequence"].ToString(),
                        TransactionId = xevent.Actions["transaction_id"].ToString(),
                        BatchText = xevent.Fields["batch_text"].ToString(),
                        Timestamp = xevent.Timestamp
                    };

                    bulkInsert.Table = bulkInsert.BatchText.Split(' ')[2];
                    string[] columns = bulkInsert.BatchText.GetParenthesesContent().Split(", ");
                    foreach (string col in columns)
                    {
                        string[] columnInfo = col.Split(' ');
                        bulkInsert.Columns.Add(new Column { Name = columnInfo[0], DataType = columnInfo[1] });
                    }

                    // Optional "with (...)" clause carries bulk-load options we must replicate.
                    if (bulkInsert.BatchText.Contains(" with ("))
                    {
                        string[] settings = bulkInsert.BatchText
                            .GetParenthesesContent(bulkInsert.BatchText.IndexOf(" with (") + 6).Split(", ");
                        foreach (string setting in settings)
                        {
                            if (setting == "FIRE_TRIGGERS")
                            {
                                bulkInsert.FireTriggers = true;
                                break;
                            }
                        }
                    }

                    evt = bulkInsert;
                }
                else if (xevent.Name == "sql_batch_completed" &&
                         xevent.Fields["batch_text"].ToString().Contains("insert bulk"))
                {
                    // The completed event carries the row count; attach it to the matching
                    // sql_batch_starting recorded earlier in the same session/transaction.
                    if (!sessions.TryGetValue(xevent.Actions["session_id"].ToString(), out var session))
                    {
                        throw new Exception(
                            $"Could not find session ID {xevent.Actions["session_id"].ToString()} for bulk insert.");
                    }
                    else
                    {
                        var bulkInsert = (BulkInsert)session.Events
                            .FirstOrDefault(e => (e as BulkInsert)?.TransactionId == xevent.Actions["transaction_id"].ToString() &&
                                                 (e as BulkInsert)?.BatchText == xevent.Fields["batch_text"].ToString());
                        if (bulkInsert != null)
                        {
                            bulkInsert.Rows = int.Parse(xevent.Fields["row_count"].ToString());
                        }
                    }
                }

                if (evt != null)
                {
                    string sessionId = xevent.Actions["session_id"].ToString();
                    Session session = sessions.GetOrAdd(sessionId, new Session() { SessionId = sessionId });
                    session.Events.Add(evt);
                }

                return Task.CompletedTask;
            }, CancellationToken.None);
        }
    }

    foreach (Session session in sessions.Values)
    {
        // BUG FIX: EventSequence is a string here, and ordering it lexicographically put
        // e.g. "10" before "2". Parse to a number so events replay in capture order
        // (the event_sequence action is always numeric; the async variant stores it as long).
        session.Events = session.Events.OrderBy(e => long.Parse(e.EventSequence)).ToList();
    }

    var run = new Run()
    {
        Sessions = sessions.Values.ToArray().Where(s => s.Events.Count > 0)
            .OrderBy(s => long.Parse(s.Events.First().EventSequence)).ToList()
    };
    run.EventCaptureOrigin = run.Sessions.First().Events.First().Timestamp;
    return run;
}
/// <summary>
/// Streams the given .xel trace files and rebuilds, per session, the ordered list of
/// replayable events (RPC calls, user transactions, and bulk inserts) that ran against
/// the database named in the connection string.
/// </summary>
/// <param name="filePaths">Candidate file paths; anything not matching "name.xel" is skipped.</param>
/// <param name="connectionString">Connection whose Database property filters events; also used to load RPC parameters and bulk-insert data.</param>
/// <returns>A Run whose non-empty sessions are ordered by their first event's timestamp.</returns>
internal async Task <Run> PreProcessAsync(string[] filePaths, string connectionString)
{
    var sessionDictionary = new ConcurrentDictionary <string, Session>();
    using (var con = new SqlConnection(connectionString))
    {
        await con.OpenAsync();
        foreach (string filePath in filePaths)
        {
            if (!Regex.IsMatch(Path.GetFileName(filePath), @"^\w+\.xel$", RegexOptions.IgnoreCase))
            {
                //ignore any files that don't fit the pattern of an XE file
                continue;
            }

            var xeStream = new XEFileEventStreamer(filePath);
            await xeStream.ReadEventStream(xevent =>
            {
                // Only events captured against the target database are replayed.
                if (xevent.Actions["database_name"].ToString() != con.Database)
                {
                    return (Task.CompletedTask);
                }

                Event evt = null;
                // rpc_starting: a stored-procedure call, excluding connection-pool resets
                // and sp_unprepare housekeeping.
                if (xevent.Name == "rpc_starting" &&
                    xevent.Fields["object_name"].ToString() != "sp_reset_connection" &&
                    !xevent.Fields["statement"].ToString().StartsWith("exec sp_unprepare "))
                {
                    evt = new Rpc()
                    {
                        EventSequence = long.Parse(xevent.Actions["event_sequence"].ToString()),
                        TransactionId = xevent.Actions["transaction_id"].ToString(),
                        Statement = xevent.Fields["statement"].ToString(),
                        ObjectName = xevent.Fields["object_name"].ToString(),
                        Timestamp = xevent.Timestamp
                    };

                    //Build parameters so we can replay statements as ADO.NET CommandType.StoredProcedure calls in order to avoid extra compilations of raw statement
                    LoadParameters(con, (Rpc)evt);
                }
                else if (xevent.Name == "sql_transaction")
                {
                    // Only user transactions matter for replay; system transactions are skipped.
                    if (xevent.Fields["transaction_type"].ToString() == "User")
                    {
                        evt = new Transaction()
                        {
                            EventSequence = long.Parse(xevent.Actions["event_sequence"].ToString()),
                            TransactionId = xevent.Fields["transaction_id"].ToString(),
                            TransactionState = xevent.Fields["transaction_state"].ToString(),
                            Timestamp = xevent.Timestamp
                        };
                    }
                }
                else if (xevent.Name == "sql_batch_starting" && xevent.Fields["batch_text"].ToString().Contains("insert bulk"))
                {
                    // Batch text shape: "insert bulk <table>(<col> <type>, ...) [with (<options>)]"
                    var bulkInsert = new BulkInsert()
                    {
                        EventSequence = long.Parse(xevent.Actions["event_sequence"].ToString()),
                        TransactionId = xevent.Actions["transaction_id"].ToString(),
                        BatchText = xevent.Fields["batch_text"].ToString(),
                        Timestamp = xevent.Timestamp
                    };

                    bulkInsert.Table = bulkInsert.BatchText.Split(' ')[2];
                    // The table token may be fused with the opening column list, e.g. "dbo.T(col ...".
                    if (bulkInsert.Table.Contains('('))
                    {
                        bulkInsert.Table = bulkInsert.Table.Substring(0, bulkInsert.Table.IndexOf('('));
                    }

                    string[] columns = bulkInsert.BatchText.GetParenthesesContent().Split(", ");
                    foreach (string col in columns)
                    {
                        string[] columnInfo = col.Split(' ');
                        bulkInsert.Columns.Add(new Column { Name = columnInfo[0], SqlDbType = GetSqlDbType(columnInfo[1]) });
                    }

                    // Optional "with (...)" clause carries bulk-load options we must replicate.
                    if (bulkInsert.BatchText.Contains(" with ("))
                    {
                        string[] settings = bulkInsert.BatchText
                            .GetParenthesesContent(bulkInsert.BatchText.IndexOf(" with (") + 6).Split(", ");
                        foreach (string setting in settings)
                        {
                            if (setting == "CHECK_CONSTRAINTS")
                            {
                                bulkInsert.CheckConstraints = true;
                            }
                            else if (setting == "FIRE_TRIGGERS")
                            {
                                bulkInsert.FireTriggers = true;
                                break;
                            }
                        }
                    }

                    evt = bulkInsert;
                }
                else if (xevent.Name == "sql_batch_completed" && xevent.Fields["batch_text"].ToString().Contains("insert bulk"))
                {
                    // The completed event carries the row count; attach it to the matching
                    // sql_batch_starting recorded earlier for the same session/transaction.
                    if (!sessionDictionary.TryGetValue(xevent.Actions["session_id"].ToString(), out var session))
                    {
                        throw new Exception(
                            $"Could not find session ID {xevent.Actions["session_id"].ToString()} for bulk insert.");
                    }

                    var bulkInsert = (BulkInsert)session.Events
                        .FirstOrDefault(e => (e as BulkInsert)?.TransactionId == xevent.Actions["transaction_id"].ToString() &&
                                             (e as BulkInsert)?.BatchText == xevent.Fields["batch_text"].ToString());
                    if (bulkInsert != null)
                    {
                        bulkInsert.RowCount = int.Parse(xevent.Fields["row_count"].ToString());
                        AddBulkInsertData(bulkInsert, con);
                    }
                }

                if (evt != null)
                {
                    string sessionId = xevent.Actions["session_id"].ToString();
                    Session session = sessionDictionary.GetOrAdd(sessionId, new Session() { SessionId = sessionId });
                    session.Events.Add(evt);
                }

                return (Task.CompletedTask);
            }, CancellationToken.None);
        }
    }

    var sessions = sessionDictionary.Values.ToList();
    //Remove any sessions with no events
    sessions.RemoveAll(s => s.Events.Count == 0);
    foreach (Session session in sessions)
    {
        //Remove any bulk inserts where we never found a corresponding sql_batch_completed
        session.Events.RemoveAll(e => (e as BulkInsert)?.RowCount == 0);
        session.Events = session.Events.OrderBy(e => e.EventSequence).ToList();
    }

    var run = new Run()
    {
        Sessions = sessions.Where(s => s.Events.Count > 0).OrderBy(s => s.Events.First().Timestamp).ToList()
    };
    // NOTE(review): First() throws if every session was filtered out — confirm callers
    // guarantee at least one replayable event.
    run.EventCaptureOrigin = run.Sessions.First().Events.First().Timestamp;
    return (run);
}
/// <summary>
/// Read a XEL file, consume all callstacks, hash them and return the equivalent XML
/// </summary>
/// <param name="xelFiles">List of paths to XEL files to read</param>
/// <param name="bucketize">True to aggregate identical stacks into histogram slots; false to emit each event's stack individually.</param>
/// <returns>XML equivalent of the histogram corresponding to these events</returns>
public string ExtractFromXEL(string[] xelFiles, bool bucketize)
{
    var callstackSlots = new Dictionary<string, long>();
    var callstackRaw = new Dictionary<string, string>();
    var xmlEquivalent = new StringBuilder();

    // the below feels quite hacky. Unfortunately till such time that we have strong typing in XELite I believe this is necessary
    var relevantKeyNames = new string[] { "callstack", "call_stack", "stack_frames" };

    foreach (var xelFileName in xelFiles)
    {
        if (!File.Exists(xelFileName))
        {
            continue;
        }

        var xeStream = new XEFileEventStreamer(xelFileName);
        xeStream.ReadEventStream(
            () => Task.CompletedTask,
            evt =>
            {
                // A stack can arrive as an action OR a field (and, rarely, both),
                // so collect every matching value for this event.
                var allStacks = (from actTmp in evt.Actions
                                 where relevantKeyNames.Contains(actTmp.Key.ToLower())
                                 select actTmp.Value as string)
                                .Union(
                                 from fldTmp in evt.Fields
                                 where relevantKeyNames.Contains(fldTmp.Key.ToLower())
                                 select fldTmp.Value as string);

                int stackIndex = 0;
                foreach (var callStackString in allStacks)
                {
                    if (string.IsNullOrEmpty(callStackString))
                    {
                        continue;
                    }

                    if (bucketize)
                    {
                        lock (callstackSlots)
                        {
                            if (!callstackSlots.ContainsKey(callStackString))
                            {
                                callstackSlots.Add(callStackString, 1);
                            }
                            else
                            {
                                callstackSlots[callStackString]++;
                            }
                        }
                    }
                    else
                    {
                        // BUG FIX: the old format string used ".mi", which is not a
                        // milliseconds specifier ("m" is minutes); ".fff" renders the
                        // fractional seconds as intended.
                        var evtId = string.Format("File: {0}, Timestamp: {1}, UUID: {2}:",
                            xelFileName,
                            evt.Timestamp.ToString("yyyy-MM-dd HH:mm:ss.fff"),
                            evt.UUID);

                        // BUG FIX: an event exposing more than one stack used to make
                        // Dictionary.Add throw on the duplicate key; disambiguate the
                        // second and later stacks of the same event.
                        if (stackIndex > 0)
                        {
                            evtId += string.Format(" (stack {0})", stackIndex);
                        }

                        lock (callstackRaw)
                        {
                            callstackRaw.Add(evtId, callStackString);
                        }
                    }

                    stackIndex++;
                }

                return Task.CompletedTask;
            },
            CancellationToken.None).Wait();
    }

    if (bucketize)
    {
        // Mimic the XE histogram target's XML shape.
        xmlEquivalent.AppendLine("<HistogramTarget truncated=\"0\" buckets=\"256\">");
        foreach (var item in callstackSlots.OrderByDescending(key => key.Value))
        {
            xmlEquivalent.AppendFormat("<Slot count=\"{0}\"><value>{1}</value></Slot>", item.Value, item.Key);
            xmlEquivalent.AppendLine();
        }
        xmlEquivalent.AppendLine("</HistogramTarget>");
    }
    else
    {
        foreach (var item in callstackRaw)
        {
            xmlEquivalent.AppendLine(item.Key);
            xmlEquivalent.AppendLine(item.Value);
        }
    }

    return xmlEquivalent.ToString();
}
/// <summary>
/// Reads each remaining log file in sequence, raising metadata and per-event
/// notifications for every extended event found, until no files remain or an
/// error is reported.
/// </summary>
/// <param name="cancellationToken">Token passed through to each per-file stream read.</param>
public async Task StartReadEvents(CancellationToken cancellationToken)
{
    while (CurrentFile != null)
    {
        if (!InitializeReadFileStream())
        {
            return;
        }

        // Subscribers may veto a file before it is read; if so, skip it and continue
        // with the next file via recursion, then bail out of this invocation.
        RaiseBeforeReadFileEvent(out bool cancelBeforeReadFile);
        if (cancelBeforeReadFile)
        {
            NextFile();
            await StartReadEvents(cancellationToken);
            return;
        }

        // Reset per-file state; event numbering is 1-based within each file.
        _position = null;
        _currentRow = null;
        _currentFileEventNumber = 1;

        var currentLogFile = _logFilesWithData[_indexCurrentFile];
        FileInfo currentLogFileInfo = new FileInfo(currentLogFile);

        try
        {
            XEFileEventStreamer xeReader = new XEFileEventStreamer(_stream);
            await xeReader.ReadEventStream(() =>
            {
                // Metadata callback: publish a position with no event identity yet
                // (null UUID/timestamp, not-finished flag false).
                _position = new ExtendedEventsPosition(
                    _currentFileEventNumber,
                    CurrentFile,
                    null,
                    null,
                    false,
                    currentLogFileInfo.CreationTimeUtc,
                    currentLogFileInfo.LastWriteTimeUtc);
                RaiseOnReadMetadata(new OnReadMetadataArgs(_position));
                return (Task.CompletedTask);
            },
            (eventData) =>
            {
                // Per-event callback: publish the event together with its position
                // in the current file, then advance the event counter.
                _position = new ExtendedEventsPosition(
                    _currentFileEventNumber,
                    CurrentFile,
                    eventData.UUID.ToString(),
                    eventData.Timestamp,
                    false,
                    currentLogFileInfo.CreationTimeUtc,
                    currentLogFileInfo.LastWriteTimeUtc);
                _currentRow = new ExtendedEvent(_currentFileEventNumber, eventData);
                RaiseOnRead(new OnReadEventArgs(_currentRow, _position, _currentFileEventNumber));
                _currentFileEventNumber++;
                return (Task.CompletedTask);
            }, cancellationToken);

            // File done: re-publish the final position with the completion flag set
            // to true and no current row.
            _currentRow = null;
            if (_position != null)
            {
                _position = new ExtendedEventsPosition(
                    _position.EventNumber,
                    _position.CurrentFileData,
                    _position.EventUUID,
                    _position.EventPeriod,
                    true,
                    currentLogFileInfo.CreationTimeUtc,
                    currentLogFileInfo.LastWriteTimeUtc
                    );
            }
            RaiseOnRead(new OnReadEventArgs(_currentRow, _position, _currentFileEventNumber));
            NextFile();
        }
        catch (Exception ex)
        {
            // Surface the failure to subscribers and stop processing further files.
            RaiseOnError(new OnErrorEventArgs(ex));
            break;
        }
    }
}