/// <summary>
/// Creates the destination file name for the filtered trace file.
/// </summary>
/// <param name="etlFileName">Name of the ETL file being processed.</param>
/// <param name="isActiveEtl">Whether the ETL file is the active ETL file.</param>
/// <returns>The destination file name (without path) for the filtered trace file.</returns>
private string CreateDestinationFileName(string etlFileName, bool isActiveEtl)
{
    string differentiator = string.Format(
        CultureInfo.InvariantCulture,
        "{0:D10}",
        isActiveEtl ? this.lastEventIndexProcessed.TimestampDifferentiator : int.MaxValue);

    string traceFileNamePrefix = string.Format(
        CultureInfo.InvariantCulture,
        "{0}_{1}_{2:D20}_{3}.",
        this.fabricNodeId,
        Path.GetFileNameWithoutExtension(etlFileName),
        this.lastEventIndexProcessed.Timestamp.Ticks,
        differentiator);

    var applicationInstanceId = ContainerEnvironment.GetContainerApplicationInstanceId(this.logSourceId);
    if (ContainerEnvironment.IsContainerApplication(applicationInstanceId))
    {
        // Note that a hash of the applicationInstanceId is used to reduce the file name length by around 70 characters.
        // This works around a PathTooLongException in FileUploaderBase.cs, since we don't have an interop for FileSystemWatcher
        // and the version of .NET 4.5 in use does not support long paths yet.
        traceFileNamePrefix = string.Format(
            CultureInfo.InvariantCulture,
            "{0}_{1:X8}_{2}_{3:D20}_{4}.",
            this.fabricNodeId,
            Path.GetFileName(applicationInstanceId).GetHashCode(),
            Path.GetFileNameWithoutExtension(etlFileName),
            this.lastEventIndexProcessed.Timestamp.Ticks,
            differentiator);
    }

    return string.Concat(
        traceFileNamePrefix,
        EtlConsumerConstants.FilteredEtwTraceFileExtension);
}
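// Illustrative sketch, not production code: it shows the shape of the names produced by
// CreateDestinationFileName above, using the same format strings but made-up values
// (node ID, ETL file name, timestamp). Assumes the usual usings (System, System.Globalization,
// System.IO). The container hash value varies by runtime/process, so it is not shown literally.
private static void DestinationFileNameSketch()
{
    string fabricNodeId = "nodeid0";                      // hypothetical node ID
    string etlFileName = @"C:\logs\example_traces_001.etl"; // hypothetical ETL file
    long ticks = new DateTime(2020, 1, 1, 0, 0, 0, DateTimeKind.Utc).Ticks; // 637134336000000000

    // Inactive ETL file: the differentiator is int.MaxValue.
    string differentiator = string.Format(CultureInfo.InvariantCulture, "{0:D10}", int.MaxValue);

    // Non-container case:
    // nodeid0_example_traces_001_00637134336000000000_2147483647.dtr
    string prefix = string.Format(
        CultureInfo.InvariantCulture,
        "{0}_{1}_{2:D20}_{3}.",
        fabricNodeId,
        Path.GetFileNameWithoutExtension(etlFileName),
        ticks,
        differentiator);
    Console.WriteLine(prefix + "dtr");

    // Container case: the application instance ID is folded into an 8-hex-digit hash to keep
    // the overall path short, e.g. nodeid0_XXXXXXXX_example_traces_001_00637134336000000000_2147483647.dtr
    string applicationInstanceId = "MyAppType_App0";      // hypothetical instance ID
    string containerPrefix = string.Format(
        CultureInfo.InvariantCulture,
        "{0}_{1:X8}_{2}_{3:D20}_{4}.",
        fabricNodeId,
        Path.GetFileName(applicationInstanceId).GetHashCode(),
        Path.GetFileNameWithoutExtension(etlFileName),
        ticks,
        differentiator);
    Console.WriteLine(containerPrefix + "dtr");
}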
private void CopyTraceFileForUpload(string fileName)
{
    // Build the destination name for the filtered trace file
    //
    // !!! WARNING !!!
    // The trace viewer tool parses the file names of the filtered trace files.
    // Changing the file name format might require a change to trace viewer as well.
    // !!! WARNING !!!
    //
    // If the ETL file is an active ETL file, the trace file name is of the form:
    // <FabricNodeID>_<etlFileName>_<TimestampOfLastEventProcessed>_<TimestampDifferentiatorOfLastEventProcessed>.dtr
    //
    // If the ETL file is an inactive ETL file, the trace file name is of the form:
    // <FabricNodeID>_<etlFileName>_<TimestampOfLastEventProcessed>_<Int32.MaxValue>.dtr
    //
    // Using Int32.MaxValue as a component of the trace file name makes
    // it easy to identify gaps in the filtered traces if the DCA is
    // falling behind on trace processing. Recall that an inactive ETL
    // file is always processed fully. Only active ETL files are processed
    // in chunks. Therefore, the presence of Int32.MaxValue indicates that
    // the corresponding ETL file is inactive and has been fully processed
    // by the DCA. Thus, gaps **within** an ETL file (i.e. unprocessed
    // chunks within the file) can be identified by the absence of a file
    // containing Int32.MaxValue in its name.
    //
    // It is also worth noting that ETL file names are sequentially
    // numbered, which helps in identifying gaps **between** ETL files
    // (i.e. ETL files that were not processed at all). The use of
    // Int32.MaxValue is an enhancement that enables us to identify gaps
    // within an ETL file. Using these two concepts, we can look at a set
    // of filtered trace files and determine whether they are complete.
    // If they are not complete, we can also identify where the gaps are.
    string differentiator = string.Format(
        CultureInfo.InvariantCulture,
        "{0:D10}",
        0);

    string newTraceFileName = "";
    long lastEventTicks;

    try
    {
        if (logStartTime == 0)
        {
            // We only need the timestamp at which trace file generation
            // started, for aligning with ETW logs.
            this.logStartTime = GetFirstEventTicks(fileName);
        }

        lastEventTicks = GetLastEventTicks(fileName);
    }
    catch (Exception e)
    {
        var fileNameDiscard = fileName + ".discard";
        this.traceSource.WriteExceptionAsWarning(
            this.logSourceId,
            e,
            "Could not create filename for trace files for upload. Renaming file to {0}",
            fileNameDiscard);

        // Do not process the file.
        try
        {
            // Delete the file so that storage is not blocked.
            FabricFile.Delete(fileName);
        }
        catch (Exception ex)
        {
            this.traceSource.WriteExceptionAsWarning(
                this.logSourceId,
                ex,
                "Failed to rename file to {0}",
                fileNameDiscard);
        }

        return;
    }

    // Create the file name that TraceViewer understands.
    newTraceFileName = string.Format(
        CultureInfo.InvariantCulture,
        "fabric_traces_{0}_{1}_{2:D6}",
        fabricVersion,
        this.logStartTime,
        1);

    string traceFileNamePrefix = string.Format(
        CultureInfo.InvariantCulture,
        "{0}_{1}_{2:D20}_{3}.",
        this.fabricNodeId,
        newTraceFileName,
        lastEventTicks,
        differentiator);

    var applicationInstanceId = ContainerEnvironment.GetContainerApplicationInstanceId(this.logSourceId);
    if (ContainerEnvironment.IsContainerApplication(applicationInstanceId))
    {
        // Note that a hash of the applicationInstanceId is used to reduce the file name length by around 70 characters.
        // This works around a PathTooLongException in FileUploaderBase.cs, since we don't have an interop for FileSystemWatcher
        // and the version of .NET 4.5 in use does not support long paths yet.
        traceFileNamePrefix = string.Format(
            CultureInfo.InvariantCulture,
            "{0}_{1:X8}_{2}_{3:D20}_{4}.",
            this.fabricNodeId,
            Path.GetFileName(applicationInstanceId).GetHashCode(),
            newTraceFileName,
            lastEventTicks,
            differentiator);
    }

    string traceFileNameWithoutPath = string.Concat(
        traceFileNamePrefix,
        "dtr");
    string compressedTraceFileNameWithoutPath = string.Concat(
        traceFileNamePrefix,
        "dtr.zip");
    string subFolder = GetTraceFileSubFolder(fileName);
    string traceFileDestinationPath = Path.Combine(
        this.csvFolder,
        subFolder);
    string traceFileDestinationName = Path.Combine(
        traceFileDestinationPath,
        this.compressCsvFiles ? compressedTraceFileNameWithoutPath : traceFileNameWithoutPath);
    string alternateTraceFileDestinationName = Path.Combine(
        traceFileDestinationPath,
        this.compressCsvFiles ? traceFileNameWithoutPath : compressedTraceFileNameWithoutPath);

    try
    {
        InternalFileSink.CopyFile(fileName, traceFileDestinationName, false, this.compressCsvFiles);
        FabricFile.Delete(fileName);
        this.traceSource.WriteInfo(
            this.logSourceId,
            "Traces are ready. They have been moved from {0} to {1}.",
            fileName,
            traceFileDestinationName);
    }
    catch (Exception e)
    {
        this.traceSource.WriteExceptionAsError(
            this.logSourceId,
            e,
            "Failed to move file from {0} to {1}.",
            fileName,
            traceFileDestinationName);
    }
}
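// Illustrative sketch, not production code: it shows the TraceViewer-compatible name that the
// method above composes, using the same format strings but made-up values. "6.4.617.9590" is a
// hypothetical fabricVersion; on this path the differentiator is always "0000000000".
private static void TraceViewerFileNameSketch()
{
    string fabricNodeId = "nodeid0";                                    // hypothetical
    string fabricVersion = "6.4.617.9590";                              // hypothetical
    long logStartTime = new DateTime(2020, 1, 1, 0, 0, 0, DateTimeKind.Utc).Ticks;
    long lastEventTicks = logStartTime + TimeSpan.FromMinutes(5).Ticks;
    string differentiator = string.Format(CultureInfo.InvariantCulture, "{0:D10}", 0);

    string newTraceFileName = string.Format(
        CultureInfo.InvariantCulture,
        "fabric_traces_{0}_{1}_{2:D6}",
        fabricVersion,
        logStartTime,
        1);

    // nodeid0_fabric_traces_6.4.617.9590_637134336000000000_000001_00637134339000000000_0000000000.dtr
    string name = string.Format(
        CultureInfo.InvariantCulture,
        "{0}_{1}_{2:D20}_{3}.",
        fabricNodeId,
        newTraceFileName,
        lastEventTicks,
        differentiator) + "dtr";
    Console.WriteLine(name);
}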
private void CopyTraceFileForUpload(string fileName, bool isActiveEtl)
{
    // If the temporary trace file does not contain events and the ETL
    // file that is being processed is an active ETL file, then don't
    // copy the temporary file to the ETW CSV directory.
    //
    // In contrast, temporary trace files corresponding to inactive ETL
    // files are copied even if they don't have events in them (i.e. a
    // zero-byte file is copied) because it makes it easier to identify
    // gaps in traces if the DCA happens to fall behind on trace processing.
    // For details, please see comments later in this function.
    if (0 == this.fileSink.WriteStatistics.EventsWritten && isActiveEtl)
    {
        return;
    }

    // Build the destination name for the filtered trace file
    //
    // !!! WARNING !!!
    // The trace viewer tool parses the file names of the filtered trace files.
    // Changing the file name format might require a change to trace viewer as well.
    // !!! WARNING !!!
    //
    // If the ETL file is an active ETL file, the trace file name is of the form:
    // <FabricNodeID>_<etlFileName>_<TimestampOfLastEventProcessed>_<TimestampDifferentiatorOfLastEventProcessed>.dtr
    // For containers the file name is of the form:
    // <FabricNodeID>_<ContainerName>_<etlFileName>_<TimestampOfLastEventProcessed>_<TimestampDifferentiatorOfLastEventProcessed>.dtr
    //
    // If the ETL file is an inactive ETL file, the trace file name is of the form:
    // <FabricNodeID>_<etlFileName>_<TimestampOfLastEventProcessed>_<Int32.MaxValue>.dtr
    // For containers the file name is of the form:
    // <FabricNodeID>_<ContainerName>_<etlFileName>_<TimestampOfLastEventProcessed>_<Int32.MaxValue>.dtr
    //
    // Using Int32.MaxValue as a component of the trace file name makes
    // it easy to identify gaps in the filtered traces if the DCA is
    // falling behind on trace processing. Recall that an inactive ETL
    // file is always processed fully. Only active ETL files are processed
    // in chunks. Therefore, the presence of Int32.MaxValue indicates that
    // the corresponding ETL file is inactive and has been fully processed
    // by the DCA. Thus, gaps **within** an ETL file (i.e. unprocessed
    // chunks within the file) can be identified by the absence of a file
    // containing Int32.MaxValue in its name.
    //
    // It is also worth noting that ETL file names are sequentially
    // numbered, which helps in identifying gaps **between** ETL files
    // (i.e. ETL files that were not processed at all). The use of
    // Int32.MaxValue is an enhancement that enables us to identify gaps
    // within an ETL file. Using these two concepts, we can look at a set
    // of filtered trace files and determine whether they are complete.
    // If they are not complete, we can also identify where the gaps are.
    string differentiator = string.Format(
        CultureInfo.InvariantCulture,
        "{0:D10}",
        isActiveEtl ? this.lastEventIndex.TimestampDifferentiator : int.MaxValue);

    string traceFileNamePrefix = string.Format(
        CultureInfo.InvariantCulture,
        "{0}_{1}_{2:D20}_{3}.",
        this.fabricNodeId,
        Path.GetFileNameWithoutExtension(fileName),
        this.lastEventIndex.Timestamp.Ticks,
        differentiator);

    var applicationInstanceId = ContainerEnvironment.GetContainerApplicationInstanceId(this.LogSourceId);
    if (ContainerEnvironment.IsContainerApplication(applicationInstanceId))
    {
        // Note that a hash of the applicationInstanceId is used to reduce the file name length by around 70 characters.
        // This works around a PathTooLongException in FileUploaderBase.cs, since we don't have an interop for FileSystemWatcher
        // and the version of .NET 4.5 in use does not support long paths yet.
        traceFileNamePrefix = string.Format(
            CultureInfo.InvariantCulture,
            "{0}_{1:X8}_{2}_{3:D20}_{4}.",
            this.fabricNodeId,
            Path.GetFileName(applicationInstanceId).GetHashCode(),
            Path.GetFileNameWithoutExtension(fileName),
            this.lastEventIndex.Timestamp.Ticks,
            differentiator);
    }

    string traceFileNameWithoutPath = string.Concat(
        traceFileNamePrefix,
        EtlConsumerConstants.FilteredEtwTraceFileExtension);
    string compressedTraceFileNameWithoutPath = string.Concat(
        traceFileNamePrefix,
        EtlConsumerConstants.FilteredEtwTraceFileExtension,
        EtlConsumerConstants.CompressedFilteredEtwTraceFileExtension);
    string subFolder = this.GetTraceFileSubFolder(fileName);
    string traceFileDestinationPath = Path.Combine(
        this.filteredTraceDirName,
        subFolder);
    string traceFileDestinationName = Path.Combine(
        traceFileDestinationPath,
        this.compressCsvFiles ? compressedTraceFileNameWithoutPath : traceFileNameWithoutPath);
    string alternateTraceFileDestinationName = Path.Combine(
        traceFileDestinationPath,
        this.compressCsvFiles ? traceFileNameWithoutPath : compressedTraceFileNameWithoutPath);

    // If a file with the same name already exists at the destination,
    // then don't copy the file over. If the file already exists at the
    // destination, then the file that we are about to copy over must be
    // a zero-byte file, because we always ignore events that we have
    // already processed. Therefore, we don't want to overwrite a file
    // that contains events with a zero-byte file.
    if (InternalFileSink.FileExists(traceFileDestinationName) ||
        InternalFileSink.FileExists(alternateTraceFileDestinationName))
    {
        Debug.Assert(0 == this.fileSink.WriteStatistics.EventsWritten, "The temporary trace file must be a zero-byte file.");

        // Also, the ETL file must be an inactive ETL file, because if it
        // had been an active ETL file and the temporary trace file was
        // empty, then we would have already returned from this method
        // due to the check made at the beginning of this method.
        Debug.Assert(false == isActiveEtl, "File must be inactive.");
        return;
    }

    // Copy the file
    try
    {
        InternalFileSink.CopyFile(this.fileSink.TempFileName, traceFileDestinationName, false, this.compressCsvFiles);

        // Log bytes read and written.
        var fileInfo = new FileInfo(this.fileSink.TempFileName);
#if !DotNetCoreClr
        this.perfHelper.BytesRead(fileInfo.Length);
        this.perfHelper.BytesWritten(fileInfo.Length);
#endif
        this.TraceSource.WriteInfo(
            this.LogSourceId,
            "Filtered traces from {0} are ready. They have been moved from {1} to {2}.",
            fileName,
            this.fileSink.TempFileName,
            traceFileDestinationName);
    }
    catch (Exception e)
    {
        // Log an error and move on.
        this.TraceSource.WriteExceptionAsError(
            this.LogSourceId,
            e,
            "Failed to copy file. Source: {0}, destination: {1}.",
            this.fileSink.TempFileName,
            traceFileDestinationName);
    }
}
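// Illustrative sketch, not production code: the comment block above explains that a filtered
// trace file whose differentiator component is Int32.MaxValue marks a fully processed
// (inactive) ETL file. This hedged helper shows that check for uncompressed .dtr names only;
// it ignores the node ID and timestamp components and assumes System.Linq is available.
private static bool EtlFileFullyProcessed(IEnumerable<string> filteredTraceFileNames)
{
    // "2147483647" is int.MaxValue formatted with {0:D10}, the differentiator used
    // for inactive ETL files in the names built above.
    string fullyProcessedMarker = string.Format(CultureInfo.InvariantCulture, "{0:D10}", int.MaxValue);

    // A fully processed ETL file contributes a .dtr whose last underscore-separated
    // component (before the extension) is the marker.
    return filteredTraceFileNames.Any(name =>
        Path.GetFileNameWithoutExtension(name)
            .EndsWith("_" + fullyProcessedMarker, StringComparison.Ordinal));
}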
private static Dictionary<string, IDcaConsumer> CreateConsumers(
    ConsumerFactory consumerFactory,
    IDictionary<string, List<object>> producerConsumerMap,
    DCASettings settings,
    DiskSpaceManager diskSpaceManager,
    string applicationInstanceId,
    IList<string> errorEvents)
{
    // Initialize the consumer instance list.
    var consumers = new Dictionary<string, IDcaConsumer>();
    foreach (string consumerInstance in settings.ConsumerInstances.Keys)
    {
        // Get the consumer instance information.
        DCASettings.ConsumerInstanceInfo consumerInstanceInfo = settings.ConsumerInstances[consumerInstance];

        // Prepare the consumer initialization parameters.
        var initParam = new ConsumerInitializationParameters(
            applicationInstanceId,
            consumerInstanceInfo.SectionName,
            Utility.FabricNodeId,
            Utility.FabricNodeName,
            Utility.LogDirectory,
            Utility.DcaWorkFolder,
            diskSpaceManager);

        // If the application is a container, move to the container log folder.
        if (ContainerEnvironment.IsContainerApplication(applicationInstanceId))
        {
            initParam = new ConsumerInitializationParameters(
                applicationInstanceId,
                consumerInstanceInfo.SectionName,
                Utility.FabricNodeId,
                Utility.FabricNodeName,
                ContainerEnvironment.GetContainerLogFolder(applicationInstanceId),
                Utility.DcaWorkFolder,
                diskSpaceManager);
        }

        // Create the consumer instance.
        IDcaConsumer consumerInterface;
        try
        {
            consumerInterface = consumerFactory.CreateConsumer(
                consumerInstance,
                initParam,
                consumerInstanceInfo.TypeInfo.AssemblyName,
                consumerInstanceInfo.TypeInfo.TypeName);
        }
        catch (Exception e)
        {
            // We should continue trying to create the other consumers.
            errorEvents.Add(e.Message);
            continue;
        }

        // Get the consumer's data sink.
        object sink = consumerInterface.GetDataSink();
        if (null == sink)
        {
            // The consumer does not wish to provide a data sink.
            // One situation where this might happen is if the consumer has
            // been disabled. This is not an error, so just move on to the
            // next consumer.
            continue;
        }

        // Add the data sink to the corresponding producer's consumer sink list.
        string producerInstance = consumerInstanceInfo.ProducerInstance;
        Debug.Assert(false == string.IsNullOrEmpty(producerInstance), "Consumers must be tied to a producer");
        if (false == producerConsumerMap.ContainsKey(producerInstance))
        {
            producerConsumerMap[producerInstance] = new List<object>();
        }

        producerConsumerMap[producerInstance].Add(sink);

        // Add the consumer to the consumer list.
        consumers[consumerInstance] = consumerInterface;
    }

    return consumers;
}
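// Illustrative sketch, not production code: it shows the shape that producerConsumerMap takes
// after CreateConsumers runs, so that CreateProducers (below) can hand each producer its sinks.
// The instance names and sink objects are made-up examples; only the grow-the-list-on-demand
// pattern mirrors the code above.
private static void ProducerConsumerMapSketch()
{
    var producerConsumerMap = new Dictionary<string, List<object>>();

    // Two hypothetical consumers tied to one producer instance, and one tied to another.
    AddSink(producerConsumerMap, "EtlFileProducer01", new object() /* e.g. an ETW trace sink */);
    AddSink(producerConsumerMap, "EtlFileProducer01", new object());
    AddSink(producerConsumerMap, "LttProducer01", new object());

    // producerConsumerMap now maps:
    //   "EtlFileProducer01" -> [sinkA, sinkB]
    //   "LttProducer01"     -> [sinkC]
}

private static void AddSink(IDictionary<string, List<object>> map, string producerInstance, object sink)
{
    if (!map.ContainsKey(producerInstance))
    {
        map[producerInstance] = new List<object>();
    }

    map[producerInstance].Add(sink);
}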
private static Dictionary<string, IDcaProducer> CreateProducers(
    ProducerFactory producerFactory,
    IDictionary<string, List<object>> producerConsumerMap,
    DCASettings settings,
    string applicationInstanceId,
    IList<string> errorEvents)
{
    // Initialize the producer instance list.
    var producers = new Dictionary<string, IDcaProducer>();

    Debug.Assert(null != producerConsumerMap, "Map of producers to consumers must be initialized.");

    foreach (string producerInstance in settings.ProducerInstances.Keys)
    {
        // Get the producer instance information.
        DCASettings.ProducerInstanceInfo producerInstanceInfo = settings.ProducerInstances[producerInstance];

        // Prepare the producer initialization parameters.
        ProducerInitializationParameters initParam = new ProducerInitializationParameters();
        initParam.ApplicationInstanceId = applicationInstanceId;
        initParam.SectionName = producerInstanceInfo.SectionName;
        initParam.LogDirectory = Utility.LogDirectory;
        if (ContainerEnvironment.IsContainerApplication(applicationInstanceId))
        {
            // For container applications only the ETL file producer is supported;
            // skip all other producer types.
            if (producerInstanceInfo.TypeName != StandardPluginTypes.EtlFileProducer)
            {
                continue;
            }

            initParam.LogDirectory = ContainerEnvironment.GetContainerLogFolder(applicationInstanceId);
        }

        initParam.WorkDirectory = Utility.DcaWorkFolder;
        if (producerConsumerMap.ContainsKey(producerInstance))
        {
            initParam.ConsumerSinks = producerConsumerMap[producerInstance];
            producerConsumerMap.Remove(producerInstance);
        }
        else
        {
            initParam.ConsumerSinks = null;
        }

        // Create the producer instance.
        try
        {
            var producerInterface = producerFactory.CreateProducer(
                producerInstance,
                initParam,
                producerInstanceInfo.TypeName);

            // Add the producer to the producer list.
            producers[producerInstance] = producerInterface;
        }
        catch (Exception e)
        {
            Utility.TraceSource.WriteError(
                TraceType,
                "Failed to create producer {0}. {1}",
                producerInstance,
                e);

            var message = string.Format(
                StringResources.DCAError_UnhandledPluginExceptionHealthDescription,
                producerInstanceInfo.SectionName,
                e.Message);
            errorEvents.Add(message);
        }
    }

    return producers;
}
private bool TryGetContainerTraceFolder(out string containerTraceFolder)
{
    containerTraceFolder = null;
    var applicationInstanceId = ContainerEnvironment.GetContainerApplicationInstanceId(this.logSourceId);
    if (ContainerEnvironment.IsContainerApplication(applicationInstanceId))
    {
        string containerTraceFolderParent = Path.Combine(
            ContainerEnvironment.GetContainerLogFolder(applicationInstanceId),
            LttProducerConstants.LttSubDirectoryUnderLogDirectory);

        IEnumerable<string> traceFolders = null;
        int numberOfTraceFolders;

        try
        {
            traceFolders = Directory.EnumerateDirectories(
                containerTraceFolderParent,
                $"{LttProducerConstants.LttTraceSessionFolderNamePrefix}*");
            numberOfTraceFolders = traceFolders.Count();
        }
        catch (OverflowException)
        {
            this.traceSource.WriteWarning(this.logSourceId, "Number of container trace folders found is too large.");
            numberOfTraceFolders = int.MaxValue;
        }
        catch (Exception e)
        {
            this.traceSource.WriteExceptionAsError(this.logSourceId, e, "Exception when trying to get container trace folder.");
            return false;
        }

        if (traceFolders == null)
        {
            return false;
        }

        switch (numberOfTraceFolders)
        {
            case 0:
                this.traceSource.WriteWarning(this.logSourceId, "No LTTng traces folder found for container.");
                containerTraceFolder = null;
                break;
            case 1:
                containerTraceFolder = traceFolders.First();
                this.traceSource.WriteInfo(this.logSourceId, $"Processing LTTng traces from container at: {containerTraceFolder}");
                break;
            default:
                containerTraceFolder = traceFolders.FirstOrDefault();
                this.traceSource.WriteWarning(this.logSourceId, $"Found {numberOfTraceFolders} container trace folders. Using {containerTraceFolder}");
                break;
        }

        return true;
    }

    return false;
}