/// <summary>
/// Initializes a new instance of the <see cref="AzureBlobEtwConfigReader"/> class.
/// </summary>
/// <param name="configReader">Underlying configuration reader used to look up settings.</param>
/// <param name="sectionName">Name of the configuration section this reader consumes.</param>
/// <param name="traceSource">Trace source used for diagnostic output.</param>
/// <param name="logSourceId">Identifier stamped on trace messages from this object.</param>
public AzureBlobEtwConfigReader(
    IConfigReader configReader,
    string sectionName,
    FabricEvents.ExtensionsEvents traceSource,
    string logSourceId)
{
    // Capture the tracing context first; the Azure utility helper below is
    // built from the same trace source and log source id.
    this.traceSource = traceSource;
    this.logSourceId = logSourceId;
    this.configReader = configReader;
    this.sectionName = sectionName;
    this.azureUtility = new AzureUtility(traceSource, logSourceId);
}
/// <summary>
/// Initializes a new instance of the <see cref="AzureBlobConfigReader"/> class.
/// </summary>
/// <param name="configReader">Underlying configuration reader used to look up settings.</param>
/// <param name="sectionName">Name of the configuration section this reader consumes.</param>
/// <param name="traceSource">Trace source used for diagnostic output.</param>
/// <param name="logSourceId">Identifier stamped on trace messages from this object.</param>
public AzureBlobConfigReader(
    IConfigReader configReader,
    string sectionName,
    FabricEvents.ExtensionsEvents traceSource,
    string logSourceId)
{
    this.traceSource = traceSource;
    this.logSourceId = logSourceId;
    this.configReader = configReader;
    this.sectionName = sectionName;
    this.azureUtility = new AzureUtility(traceSource, logSourceId);

    // Parameter names used when reading container and deletion-age settings.
    // NOTE(review): containerParamName and etwContainerParamName are both
    // initialized from AzureConstants.ContainerParamName — confirm this is
    // intentional and not meant to use a distinct ETW-container constant.
    this.containerParamName = AzureConstants.ContainerParamName;
    this.etwContainerParamName = AzureConstants.ContainerParamName;
    this.dataDeletionAgeParamName = AzureConstants.DataDeletionAgeParamName;
    this.dataDeletionAgeTestParamName = AzureConstants.TestDataDeletionAgeParamName;
}
/// <summary>
/// Initializes a new instance of the <see cref="AzureBlobEtwUploader"/> class.
/// When upload is enabled, sets up bookmark folders, the etw log directory,
/// the in-memory ETL writer, the file uploader, the upload timer and the
/// trimmer used to ship ETW traces to Azure blob storage.
/// </summary>
/// <param name="initParam">Consumer initialization parameters supplied by the DCA.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when bookmark folders/files, the etw log directory, or the blob
/// uploader work folder cannot be created.
/// </exception>
public AzureBlobEtwUploader(ConsumerInitializationParameters initParam)
{
    this.stopping = false;
    this.initParam = initParam;
    this.logSourceId = string.Concat(initParam.ApplicationInstanceId, "_", initParam.SectionName);
    this.traceSource = new FabricEvents.ExtensionsEvents(FabricEvents.Tasks.FabricDCA);

    // Progress manager persists bookmarks, retrying transient failures.
    this.progressManager = new ConsumerProgressManager(
        this.traceSource,
        this.logSourceId,
        AzureBlobEtwConstants.MethodExecutionInitialRetryIntervalMs,
        AzureBlobEtwConstants.MethodExecutionMaxRetryCount,
        AzureBlobEtwConstants.MethodExecutionMaxRetryIntervalMs);

    this.configReader = new AzureBlobEtwConfigReader(
        new ConfigReader(initParam.ApplicationInstanceId),
        initParam.SectionName,
        this.traceSource,
        this.logSourceId);

    // Separate performance helpers for the stream path and the file path.
    this.streamUploadPerfHelper = new AzureBlobPerformance(this.traceSource, this.logSourceId);
    this.fileUploadPerfHelper = new AzureBlobPerformance(this.traceSource, this.logSourceId);

    // Read blob-specific settings; nothing more to do when upload is disabled.
    this.blobUploadSettings = this.GetSettings();
    if (!this.blobUploadSettings.Enabled)
    {
        return;
    }

    // The destination key identifies the (account, container) pair this
    // uploader writes to; it also names the bookmark folder below.
    var account = this.blobUploadSettings.StorageAccountFactory.Connection.UseDevelopmentStorage
        ? AzureConstants.DevelopmentStorageConnectionString
        : this.blobUploadSettings.StorageAccountFactory.Connection.AccountName;
    this.destinationKey = string.Join(
        "_",
        StandardPluginTypes.AzureBlobEtwUploader,
        account,
        this.blobUploadSettings.EtwTraceContainerName);

    // Initialize bookmark folders and files.
    if (!this.progressManager.InitializeBookmarkFoldersAndFiles(
            this.initParam.WorkDirectory,
            this.destinationKey))
    {
        const string Message = "Failed to initialize bookmark folders and files.";
        this.traceSource.WriteError(this.logSourceId, Message);
        throw new InvalidOperationException(Message);
    }

    // Create the etw log directory.
    this.etwLogDirName = this.CreateEtwLogDirectory();
    if (string.IsNullOrEmpty(this.etwLogDirName))
    {
        const string Message = "Failed to create etw log directory.";
        this.traceSource.WriteError(this.logSourceId, Message);
        throw new InvalidOperationException(Message);
    }

    // Create a sub-directory for the blob uploader.
    this.workFolder = this.CreateBlobUploaderWorkSubDirectory();
    if (string.IsNullOrEmpty(this.workFolder))
    {
        const string Message = "Failed to create work folder for the blob uploader.";
        this.traceSource.WriteError(this.logSourceId, Message);
        throw new InvalidOperationException(Message);
    }

    // Helper that writes events delivered from ETL files into an in-memory buffer.
    this.etlToInMemoryBufferWriter = new EtlToInMemoryBufferWriter(
        new TraceEventSourceFactory(),
        this.logSourceId,
        initParam.FabricNodeId,
        this.etwLogDirName,
        true,
        this);

    // Set the event filter.
    this.etlToInMemoryBufferWriter.SetEtwEventFilter(
        this.blobUploadSettings.Filter,
        WinFabDefaultFilter.StringRepresentation,
        WinFabSummaryFilter.StringRepresentation,
        true);

    // Helper that syncs local files to blob storage. Local files are created
    // when upload of a compressed memory stream to blob storage fails.
    this.fileBlobUploader = new AzureBlobUploader(
        this.traceSource,
        this.logSourceId,
        this.etwLogDirName,
        this.workFolder,
        this.blobUploadSettings.StorageAccountFactory,
        this.blobUploadSettings.EtwTraceContainerName,
        this.initParam.FabricNodeInstanceName,
        this.blobUploadSettings.DeploymentId,
        this.fileUploadPerfHelper,
        null,
        this.uploadFileAccessCondition);

    // Timer that periodically pushes any locally persisted files to blob storage.
    var uploadTimerId = string.Concat(this.logSourceId, FileUploadTimerIdSuffix);
    this.fileUploadTimer = new DcaTimer(
        uploadTimerId,
        this.UploadFilesToDestinationBlob,
        this.blobUploadSettings.FileSyncInterval);
    this.fileUploadTimer.Start();

    // Trimmer deletes local files/blobs older than the configured deletion age.
    this.trimmer = new AzureFileTrimmer(
        this.etwLogDirName,
        this.workFolder,
        this.blobUploadSettings.StorageAccountFactory,
        this.blobUploadSettings.EtwTraceContainerName,
        this.blobUploadSettings.BlobDeletionAge,
        this.initParam.FabricNodeInstanceName,
        this.blobUploadSettings.DeploymentId,
        AzureUtility.IsAzureInterfaceAvailable());

    this.traceSource.WriteInfo(
        this.logSourceId,
        "Upload to blob storage is configured. Storage account: {0}, Trace container: {1}, Local trace path: {2}",
        this.blobUploadSettings.StorageAccountFactory.Connection.AccountName,
        this.blobUploadSettings.EtwTraceContainerName,
        this.etwLogDirName);
    this.traceSource.WriteInfo(
        this.logSourceId,
        "Windows Fabric event filters for Azure blob uploader: {0}",
        this.blobUploadSettings.Filter);
}
/// <summary>
/// Initializes a new instance of the <see cref="CsvUploadWorker"/> class.
/// Wires up the LTT-to-CSV writer and, when upload is enabled, the uploader
/// that syncs the resulting CSV files to Azure blob storage.
/// </summary>
/// <param name="initParam">Uploader parameters (instance id, trace source, settings, folders).</param>
/// <param name="diskSpaceManager">Disk space manager handed to the CSV writer.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the AzureFileUploader cannot be constructed.
/// </exception>
internal CsvUploadWorker(CsvUploadWorkerParameters initParam, DiskSpaceManager diskSpaceManager)
{
    // Initialization
    this.FlushDataOnDispose = false;
    this.initParam = initParam;
    this.logSourceId = this.initParam.UploaderInstanceId;
    this.traceSource = this.initParam.TraceSource;
    this.blobUploadSettings = this.initParam.Settings;
    this.azureUtility = new AzureUtility(this.traceSource, this.logSourceId);

    // Key identifying the (account, container) destination for this uploader.
    this.destinationKey = string.Join(
        "_",
        StandardPluginTypes.AzureBlobCsvUploader,
        this.blobUploadSettings.StorageAccountFactory.Connection.UseDevelopmentStorage ?
            AzureConstants.DevelopmentStorageConnectionString :
            this.blobUploadSettings.StorageAccountFactory.Connection.AccountName,
        this.blobUploadSettings.LttTraceContainerName);

    this.disposed = false;

    // Create a sub-directory for ourselves under the log directory
    bool success = GetCsvSubDirectory();

    string sourceDirectory = Path.Combine(initParam.LogDirectory, "Traces");

    // Create the helper object that writes events delivered from the LTT
    // files into CSV files.
    // FIX: the original null-checked the result of 'new', which can never be
    // null in C#; that unreachable branch (and its misspelled error message)
    // has been removed.
    if (success)
    {
        this.csvToUploadFolderWriter = new CsvToUploadFolderWriter(
            this.logSourceId,
            this.initParam.FabricNodeId,
            this.csvFolder,
            sourceDirectory,
            diskSpaceManager,
            false);
    }

    if (success)
    {
        // Create a sub-directory for the uploader under the log directory
        success = GetUploaderWorkSubDirectory();
    }

    if (this.blobUploadSettings.Enabled)
    {
        // Create and initialize the uploader
        //
        // NOTE: By specifying 'true' for the 'filterDeletionByNodeId' parameter,
        // we only delete those blobs that were uploaded by the current node. We
        // identify this via the Fabric node ID that the ETL-to-CSV writer prefixed
        // to the file name before uploading. This is done so that all nodes don't
        // wastefully try to delete all blobs.
        try
        {
            var destinationPath = string.Concat(
                this.blobUploadSettings.StorageAccountFactory.Connection.UseDevelopmentStorage ?
                    AzureConstants.DevelopmentStorageConnectionString :
                    this.blobUploadSettings.StorageAccountFactory.Connection.AccountName,
                ";", // This separator cannot occur in account name or container name
                this.blobUploadSettings.LttTraceContainerName);
            this.uploader = new AzureFileUploader(
                this.traceSource,
                this.logSourceId,
                this.csvFolder,
                destinationPath,
                this.workFolder,
                this.blobUploadSettings.StorageAccountFactory,
                this.blobUploadSettings.LttTraceContainerName,
                this.blobUploadSettings.UploadInterval,
                this.blobUploadSettings.FileSyncInterval,
                this.blobUploadSettings.BlobDeletionAge,
                this.initParam.FabricNodeInstanceName,
                this.blobUploadSettings.DeploymentId);
            this.uploader.Start();
        }
        catch (Exception ex)
        {
            throw new InvalidOperationException("AzureFileUploader could not be constructed.", ex);
        }

        // FIX: added the missing space between the concatenated message parts
        // ("Path: {2}," + "Upload interval" previously ran together).
        this.traceSource.WriteInfo(
            this.logSourceId,
            "Upload to blob storage is configured. Storage account: {0}, Trace container: {1}, Local trace Path: {2}, " +
            "Upload interval (minutes): {3}",
            this.blobUploadSettings.StorageAccountFactory.Connection.AccountName,
            this.blobUploadSettings.LttTraceContainerName,
            this.csvFolder,
            this.blobUploadSettings.UploadInterval);
    }
    else
    {
        // FIX: added the missing space between "enabled." and "Local trace Path".
        this.traceSource.WriteInfo(
            this.logSourceId,
            "Upload to blob storage is disabled (Storage key not available). Only log age management is enabled. " +
            "Local trace Path: {0}",
            this.csvFolder);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="AzureBlobUploader"/> class and
/// ensures the destination container exists (with retries) before returning.
/// </summary>
/// <param name="traceSource">Trace source for diagnostic messages.</param>
/// <param name="logSourceId">Identifier stamped on trace messages from this object.</param>
/// <param name="etwLogDirName">Directory containing the etw logs to upload.</param>
/// <param name="workFolder">Work folder under which the local map folder lives.</param>
/// <param name="storageAccountFactory">Factory providing the storage connection.</param>
/// <param name="etlFileName">Name of the ETL file being processed.</param>
/// <param name="isActiveEtl">Whether the ETL file is the active one.</param>
/// <param name="containerName">Destination blob container name.</param>
/// <param name="fabricNodeId">Fabric node id.</param>
/// <param name="fabricNodeInstanceName">Fabric node instance name (used as the
/// blob directory when Azure interfaces are unavailable).</param>
/// <param name="deploymentId">Deployment id override; when empty the value from
/// AzureUtility is used instead.</param>
/// <param name="perfHelper">Performance helper for blob operations.</param>
/// <param name="uploadStreamAccessCondition">Access condition for stream uploads.</param>
/// <param name="uploadFileAccessCondition">Access condition for file uploads.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the destination container cannot be created after retries.
/// </exception>
internal AzureBlobUploader(
    FabricEvents.ExtensionsEvents traceSource,
    string logSourceId,
    string etwLogDirName,
    string workFolder,
    StorageAccountFactory storageAccountFactory,
    string etlFileName,
    bool isActiveEtl,
    string containerName,
    string fabricNodeId,
    string fabricNodeInstanceName,
    string deploymentId,
    AzureBlobPerformance perfHelper,
    AccessCondition uploadStreamAccessCondition,
    AccessCondition uploadFileAccessCondition)
{
    this.stopping = false;
    this.traceSource = traceSource;
    this.logSourceId = logSourceId;
    this.etwLogDirName = etwLogDirName;
    this.storageAccountFactory = storageAccountFactory;
    this.containerName = containerName;
    this.etlFileName = etlFileName;
    this.isActiveEtl = isActiveEtl;
    this.fabricNodeId = fabricNodeId;

    // When Azure interfaces are available, blobs are grouped under
    // deployment/role/instance; otherwise the node instance name is used.
    if (AzureUtility.IsAzureInterfaceAvailable())
    {
        var deployment = string.IsNullOrEmpty(deploymentId) ? AzureUtility.DeploymentId : deploymentId;
        this.directoryName = string.Join("/", deployment, AzureUtility.RoleName, AzureUtility.RoleInstanceId);
    }
    else
    {
        this.directoryName = fabricNodeInstanceName;
    }

    this.localMap = Path.Combine(workFolder, LocalMapFolder);
    this.uploadStreamAccessCondition = uploadStreamAccessCondition;
    this.uploadFileAccessCondition = uploadFileAccessCondition;
    this.perfHelper = perfHelper;

    // Blob copy is done one at a time, so the concurrency count is 1.
    this.perfHelper.ExternalOperationInitialize(
        ExternalOperationTime.ExternalOperationType.BlobCopy,
        1);

    this.streamWriter = new StreamWriter(new MemoryStream());
    this.lastEventIndexProcessed.Set(DateTime.MinValue, -1);

    // Create the container at the destination, retrying transient failures.
    try
    {
        Utility.PerformWithRetries(
            this.CreateContainer,
            (object)null,
            new RetriableOperationExceptionHandler(this.AzureStorageExceptionHandler),
            AzureBlobEtwConstants.MethodExecutionInitialRetryIntervalMs,
            AzureBlobEtwConstants.MethodExecutionMaxRetryCount,
            AzureBlobEtwConstants.MethodExecutionMaxRetryIntervalMs);
    }
    catch (Exception ex)
    {
        var message = string.Format(
            "Error creating container {0}, account {1}.",
            this.containerName,
            this.storageAccountFactory.Connection.AccountName);
        this.traceSource.WriteExceptionAsError(this.logSourceId, ex, message);
        throw new InvalidOperationException(message, ex);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="AzureFileUploader"/> class.
/// Creates the destination container (with retries) and registers a shared,
/// reference-counted trimmer for the destination path.
/// </summary>
/// <param name="traceSource">Trace source for diagnostic messages.</param>
/// <param name="logSourceId">Identifier stamped on trace messages from this object.</param>
/// <param name="folderName">Local folder whose files are uploaded.</param>
/// <param name="destinationPath">Key identifying the destination (account;container).</param>
/// <param name="workFolder">Work folder for the uploader.</param>
/// <param name="storageAccountFactory">Factory providing the storage connection.</param>
/// <param name="containerName">Destination blob container name.</param>
/// <param name="uploadIntervalMinutes">Interval between upload passes.</param>
/// <param name="fileSyncIntervalInMinutes">Interval between file sync passes.</param>
/// <param name="blobDeletionAgeMinutes">Age after which blobs are deleted.</param>
/// <param name="fabricNodeInstanceName">Fabric node instance name (used as the
/// blob directory when Azure interfaces are unavailable).</param>
/// <param name="deploymentId">Deployment id override; when empty the value from
/// AzureUtility is used instead.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the destination container cannot be created after retries.
/// </exception>
internal AzureFileUploader(
    FabricEvents.ExtensionsEvents traceSource,
    string logSourceId,
    string folderName,
    string destinationPath,
    string workFolder,
    StorageAccountFactory storageAccountFactory,
    string containerName,
    TimeSpan uploadIntervalMinutes,
    TimeSpan fileSyncIntervalInMinutes,
    TimeSpan blobDeletionAgeMinutes,
    string fabricNodeInstanceName,
    string deploymentId)
    : base(
        traceSource,
        logSourceId,
        folderName,
        destinationPath,
        workFolder,
        uploadIntervalMinutes,
        fileSyncIntervalInMinutes)
{
    // Initialization
    this.storageAccountFactory = storageAccountFactory;
    this.containerName = containerName;

    // When Azure interfaces are available, blobs are grouped under
    // deployment/role/instance; otherwise the node instance name is used.
    this.directoryName = AzureUtility.IsAzureInterfaceAvailable() ?
        string.Join(
            "/",
            string.IsNullOrEmpty(deploymentId) ? AzureUtility.DeploymentId : deploymentId,
            AzureUtility.RoleName,
            AzureUtility.RoleInstanceId) :
        fabricNodeInstanceName;
    this.fabricNodeInstanceName = fabricNodeInstanceName;

    this.perfHelper = new AzureBlobPerformance(this.TraceSource, this.LogSourceId);

    // Blob copy is done one at a time, so the concurrency count is 1.
    this.perfHelper.ExternalOperationInitialize(
        ExternalOperationTime.ExternalOperationType.BlobCopy,
        1);

    // Create the container at the destination.
    try
    {
        Utility.PerformWithRetries(
            this.CreateContainer,
            (object)null,
            new RetriableOperationExceptionHandler(this.AzureStorageExceptionHandler));
    }
    catch (Exception e)
    {
        var message = string.Format(
            "Error creating container {0}, account {1}.",
            this.containerName,
            this.storageAccountFactory.Connection.AccountName);
        this.TraceSource.WriteExceptionAsError(this.LogSourceId, e, message);
        throw new InvalidOperationException(message, e);
    }

    // Check if a trimmer already exists to delete old files from this destination.
    lock (Trimmers)
    {
        // FIX: single TryGetValue lookup instead of ContainsKey followed by
        // the indexer (which performed the same hash lookup twice).
        TrimmerInfo trimmerInfo;
        if (Trimmers.TryGetValue(destinationPath, out trimmerInfo))
        {
            // Trimmer already exists. Increment its reference count.
            trimmerInfo.RefCount++;
        }
        else
        {
            // Trimmer does not exist. Create it.
            AzureFileTrimmer trimmer = new AzureFileTrimmer(
                folderName,
                LocalMapFolderPath,
                storageAccountFactory,
                containerName,
                blobDeletionAgeMinutes,
                fabricNodeInstanceName,
                deploymentId,
                AzureUtility.IsAzureInterfaceAvailable());

            Trimmers[destinationPath] = new TrimmerInfo
            {
                RefCount = 1,
                Trimmer = trimmer
            };
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="AzureTableEtwEventUploader"/> class.
/// Validates that Azure interfaces are available, reads table upload settings,
/// sets up the buffered event provider and filter, clamps the batch upload
/// concurrency, creates the destination table, and starts the entity trimmer.
/// </summary>
/// <param name="initParam">Consumer initialization parameters supplied by the DCA.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when Azure interfaces are unavailable, the buffered event
/// sub-directory cannot be obtained, or table creation fails.
/// </exception>
public AzureTableEtwEventUploader(ConsumerInitializationParameters initParam)
{
    // Initialization
    this.stopping = false;
    this.initParam = initParam;
    this.logSourceId = String.Concat(this.initParam.ApplicationInstanceId, "_", this.initParam.SectionName);
    this.traceSource = new FabricEvents.ExtensionsEvents(FabricEvents.Tasks.FabricDCA);
    this.configReader = new ConfigReader(initParam.ApplicationInstanceId);
    this.azureUtility = new AzureUtility(this.traceSource, this.logSourceId);
    this.perfHelper = new AzureTablePerformance(this.traceSource, this.logSourceId);

    // Make sure that the Azure interfaces are available
    if (!AzureUtility.IsAzureInterfaceAvailable())
    {
        const string Message = "Due to unavailability of Azure interfaces, ETW traces will not be uploaded to Azure table storage.";
        this.traceSource.WriteError(this.logSourceId, Message);
        throw new InvalidOperationException(Message);
    }

    this.azureNodeInstanceId = AzureUtility.RoleInstanceId;

    // Read table-specific settings from settings.xml
    GetSettings();
    if (!this.tableUploadSettings.Enabled)
    {
        // Upload to Azure table storage is not enabled, so return immediately
        return;
    }

    // Create a sub-directory for ourselves under the log directory
    string bufferedEventFolder = GetBufferedEventSubDirectory();
    if (String.IsNullOrEmpty(bufferedEventFolder))
    {
        throw new InvalidOperationException("Unable to get buffered event subdirectory.");
    }

    // Create the helper object that buffers events delivered from the ETL
    // files into CSV files on disk.
    // FIX: the original null-checked the result of 'new', which can never be
    // null in C#; that unreachable branch has been removed.
    this.bufferedEventProvider = new BufferedEtwEventProvider(
        new TraceEventSourceFactory(),
        this.logSourceId,
        bufferedEventFolder,
        this.tableUploadSettings.UploadIntervalMinutes,
        this.tableUploadSettings.EntityDeletionAge,
        this);

    // Set the filter for Windows Fabric events
    this.bufferedEventProvider.SetEtwEventFilter(
        this.tableUploadSettings.Filter,
        defaultTableFilter,
        WinFabSummaryFilter.StringRepresentation,
        true);

    // Initialize the batch upload concurrency count. The assert surfaces
    // misconfiguration in debug builds; release builds clamp to the maximum.
    Debug.Assert(this.tableUploadSettings.BatchUploadConcurrencyCount <= AzureConstants.MaxBatchConcurrencyCount);
    if (this.tableUploadSettings.BatchUploadConcurrencyCount <= AzureConstants.MaxBatchConcurrencyCount)
    {
        this.batchConcurrencyCount = this.tableUploadSettings.BatchUploadConcurrencyCount;
    }
    else
    {
        this.traceSource.WriteError(
            this.logSourceId,
            "{0} is an invalid value for table batch concurrency count. The maximum supported value is {1} and that value will be used instead.",
            this.tableUploadSettings.BatchUploadConcurrencyCount,
            AzureConstants.MaxBatchConcurrencyCount);
        this.batchConcurrencyCount = AzureConstants.MaxBatchConcurrencyCount;
    }

    this.perfHelper.ExternalOperationInitialize(
        ExternalOperationTime.ExternalOperationType.TableUpload,
        this.batchConcurrencyCount);

    // Create the table
    try
    {
        CreateTable();
    }
    catch (Exception e)
    {
        const string Message = "Due to an error in table creation ETW traces will not be uploaded to Azure table storage.";
        this.traceSource.WriteExceptionAsError(this.logSourceId, e, Message);
        throw new InvalidOperationException(Message, e);
    }

    this.traceSource.WriteInfo(
        this.logSourceId,
        "Created table for uploading ETW traces. Storage account: {0}, Table name: {1}",
        this.tableUploadSettings.StorageAccountFactory.Connection.AccountName,
        this.tableUploadSettings.TableName);

    // Initialize old log deletion
    this.trimmer = new AzureTableTrimmer(
        this.traceSource,
        this.logSourceId,
        this.tableUploadSettings.StorageAccountFactory,
        this.tableUploadSettings.TableName,
        this.tableUploadSettings.EntityDeletionAge,
        this.CreateDeletionQuery,
        this.perfHelper);
}