/// <summary>
/// Builds a TraceViewer-compatible destination name for a filtered trace (.dtr)
/// file and moves (optionally compressing) it into the upload folder. If the
/// event timestamps needed for the destination name cannot be read, the source
/// file is deleted without being processed so that storage is not blocked.
/// </summary>
/// <param name="fileName">Full path of the filtered trace file to stage for upload.</param>
private void CopyTraceFileForUpload(string fileName)
{
    // Build the destination name for the filtered trace file
    //
    // !!! WARNING !!!
    // The trace viewer tool parses the file names of the filtered trace files.
    // Changing the file name format might require a change to trace viewer as well.
    // !!! WARNING !!!
    //
    // If the ETL file is an active ETL file, the trace file name is of the form:
    // <FabricNodeID>_<etlFileName>_<TimestampOfLastEventProcessed>_<TimestampDifferentiatorOfLastEventProcessed>.dtr
    //
    // If the ETL file is an inactive ETL file, the trace file name is of the form:
    // <FabricNodeID>_<etlFileName>_<TimestampOfLastEventProcessed>_<Int32.MaxValue>.dtr
    //
    // Using Int32.MaxValue as a component of the trace file name makes
    // it easy to identify gaps in the filtered traces if the DCA is
    // falling behind on trace processing. Recall that an inactive ETL
    // file is always processed fully. Only active ETL files are processed
    // in chunks. Therefore, the presence of Int32.MaxValue indicates that
    // the corresponding ETL file is inactive and has been fully processed
    // by the DCA. Thus, gaps **within** an ETL file (i.e. unprocessed
    // chunks within the file) can be identified by the absence of a file
    // containing Int32.MaxValue in its name.
    //
    // It is also worth noting that ETL file names are sequentially
    // numbered, which helps in identifying gaps **between** ETL files
    // (i.e. ETL files that were not processed at all). And the use of
    // Int32.MaxValue is an enhancement that enables us to identify gaps
    // within an ETL file. Using these two concepts, we can look at a set
    // of filtered trace files and determine whether they are complete.
    // And if not complete, we can also identify where all the gaps are.
    string differentiator = string.Format(
        CultureInfo.InvariantCulture,
        "{0:D10}",
        0);
    long lastEventTicks;
    try
    {
        if (logStartTime == 0)
        {
            // We just need the start of log tracing file generation time stamp,
            // for aligning with etw logs
            this.logStartTime = GetFirstEventTicks(fileName);
        }

        lastEventTicks = GetLastEventTicks(fileName);
    }
    catch (Exception e)
    {
        // Without the event timestamps we cannot build a destination name
        // that TraceViewer understands, so the file cannot be uploaded.
        // NOTE: the file is deleted (not renamed) so that storage is not
        // blocked by files we will never be able to process.
        this.traceSource.WriteExceptionAsWarning(
            this.logSourceId,
            e,
            "Could not create filename for trace files for upload. Deleting file {0} without processing it.",
            fileName);

        try
        {
            // Delete the file so that storage is not blocked
            FabricFile.Delete(fileName);
        }
        catch (Exception ex)
        {
            this.traceSource.WriteExceptionAsWarning(
                this.logSourceId,
                ex,
                "Failed to delete file {0}",
                fileName);
        }

        return;
    }

    // Create the filename which TraceViewer understands
    string newTraceFileName = string.Format(
        CultureInfo.InvariantCulture,
        "fabric_traces_{0}_{1}_{2:D6}",
        fabricVersion,
        this.logStartTime,
        1);

    string traceFileNamePrefix = string.Format(
        CultureInfo.InvariantCulture,
        "{0}_{1}_{2:D20}_{3}.",
        this.fabricNodeId,
        newTraceFileName,
        lastEventTicks,
        differentiator);

    var applicationInstanceId = ContainerEnvironment.GetContainerApplicationInstanceId(this.logSourceId);
    if (ContainerEnvironment.IsContainerApplication(applicationInstanceId))
    {
        // Note that a hash of the applicationInstanceId is being used to reduce file name length in around 70 characters
        // This is done to workaround PathTooLong exception in FileUploaderBase.cs since we don't have an interop for FileSystemWatcher
        // and .NET 4.5 used does not support long paths yet.
        traceFileNamePrefix = string.Format(
            CultureInfo.InvariantCulture,
            "{0}_{1:X8}_{2}_{3:D20}_{4}.",
            this.fabricNodeId,
            Path.GetFileName(applicationInstanceId).GetHashCode(),
            newTraceFileName,
            lastEventTicks,
            differentiator);
    }

    string traceFileNameWithoutPath = string.Concat(
        traceFileNamePrefix,
        "dtr");
    string compressedTraceFileNameWithoutPath = string.Concat(
        traceFileNamePrefix,
        "dtr.zip");

    string subFolder = GetTraceFileSubFolder(fileName);
    string traceFileDestinationPath = Path.Combine(
        this.csvFolder,
        subFolder);

    // Primary destination honors the compression setting; the alternate name is
    // the same file with the opposite compression state.
    string traceFileDestinationName = Path.Combine(
        traceFileDestinationPath,
        this.compressCsvFiles ? compressedTraceFileNameWithoutPath : traceFileNameWithoutPath);
    string alternateTraceFileDestinationName = Path.Combine(
        traceFileDestinationPath,
        this.compressCsvFiles ? traceFileNameWithoutPath : compressedTraceFileNameWithoutPath);

    try
    {
        InternalFileSink.CopyFile(fileName, traceFileDestinationName, false, this.compressCsvFiles);
        FabricFile.Delete(fileName);
        this.traceSource.WriteInfo(
            this.logSourceId,
            "Traces are ready. They have been moved from {0} to {1}.",
            fileName,
            traceFileDestinationName);
    }
    catch (Exception e)
    {
        this.traceSource.WriteExceptionAsError(
            this.logSourceId,
            e,
            "Failed to move file from {0} to {1}.",
            fileName,
            traceFileDestinationName);
    }
}
/// <summary>
/// Creates a mock in-memory DCA consumer that writes decoded ETW events to a
/// temporary file sink and, at the end of each processing period, moves the
/// result into the test output folder. When <paramref name="slow"/> has a
/// value, the destination file name is suffixed "_slow"/"_fast" and a true
/// value also simulates a slow consumer via a 2-second sleep.
/// </summary>
/// <param name="logDirectory">Root log directory; output lands under its "output" subfolder.</param>
/// <param name="slow">Null for no suffix; true = slow consumer, false = fast consumer.</param>
/// <returns>The configured mock <see cref="IDcaInMemoryConsumer"/>.</returns>
private static IDcaInMemoryConsumer CreateEtlToInMemoryBufferWriter(string logDirectory, bool? slow)
{
    // Highest event index handed off across all completed processing periods.
    EventIndex overallMaxIndex = default(EventIndex);
    overallMaxIndex.Set(DateTime.MinValue, -1);

    InternalFileSink sink = new InternalFileSink(Utility.TraceSource, LogSourceId);

    // Index of the last event written during the current processing period.
    // Captured by reference in the closures below so all lambdas share it.
    EventIndex currentPeriodIndex = default(EventIndex);

    var onPeriodStart = new Action<string, bool, string>(
        (traceFileName, isActiveEtl, traceFileSubFolder) =>
        {
            sink.Initialize();
            currentPeriodIndex.Set(DateTime.MinValue, -1);
        });

    var getMaxIndexAlreadyProcessed = new Func<string, EventIndex>(
        (traceFileSubFolder) =>
        {
            return globalMax(overallMaxIndex);
        });

    var processTraceEvent = new Action<DecodedEventWrapper, string>(
        (decodedEventWrapper, traceFileSubFolder) =>
        {
            // Flatten embedded newlines so each event occupies one line in the sink.
            string flattened = decodedEventWrapper.StringRepresentation
                .Replace("\r\n", "\r\t")
                .Replace("\n", "\t");
            sink.WriteEvent(flattened);
            currentPeriodIndex.Set(
                decodedEventWrapper.Timestamp,
                decodedEventWrapper.TimestampDifferentiator);
        });

    var onPeriodStop = new Action<string, bool, string>(
        (traceFileName, isActiveEtl, traceFileSubFolder) =>
        {
            if (slow.HasValue && slow.Value)
            {
                // Simulate a slow consumer.
                Thread.Sleep(2000);
            }

            sink.Close();

            if (sink.WriteStatistics.EventsWritten > 0)
            {
                string suffix;
                if (slow.HasValue)
                {
                    suffix = slow.Value ? "_slow" : "_fast";
                }
                else
                {
                    suffix = string.Empty;
                }

                var destFileName = CreateDestinationFileName(
                    traceFileName,
                    suffix,
                    isActiveEtl,
                    currentPeriodIndex);
                File.Move(sink.TempFileName, Path.Combine(logDirectory, "output", destFileName));
                overallMaxIndex.Set(
                    currentPeriodIndex.Timestamp,
                    currentPeriodIndex.TimestampDifferentiator);
            }
            else
            {
                // Nothing was written this period; discard the temp file.
                sink.Delete();
            }
        });

    var mockConsumer = TestUtility.MockRepository.Create<IDcaInMemoryConsumer>();

    mockConsumer
        .Setup(c => c.ConsumerProcessTraceEventAction)
        .Returns(processTraceEvent);

    mockConsumer
        .Setup(c => c.MaxIndexAlreadyProcessed)
        .Returns(getMaxIndexAlreadyProcessed);

    mockConsumer
        .Setup(c => c.OnProcessingPeriodStart(It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<string>()))
        .Callback((string traceFileName, bool isActiveEtl, string traceFileSubFolder) =>
            onPeriodStart(traceFileName, isActiveEtl, traceFileSubFolder));

    mockConsumer
        .Setup(c => c.OnProcessingPeriodStop(It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<string>()))
        .Callback((string traceFileName, bool isActiveEtl, string traceFileSubFolder) =>
            onPeriodStop(traceFileName, isActiveEtl, traceFileSubFolder));

    return mockConsumer.Object;
}

// Identity helper used so the max-index Func reads the shared captured struct
// at call time rather than a stale copy.
private static EventIndex globalMax(EventIndex index)
{
    return index;
}