public async Task AddEmote(CommandContext ctx,
    [Description("Name like `Kappa`")] string name,
    [Description("Url of the thing")] SdImage url)
{
    var lang = await Language.GetLanguageFromCtxAsync(ctx);
    var bytes = await url.GetBytesAsync(HttpClient);
    var st = new MemoryStream(bytes);
    try
    {
        if (bytes.Length > 256000)
        {
            await ctx.RespondAsync(string.Format(lang.EmoteWasLargerThan256K, FileSizeUtils.FormatSize(st.Length)));
            // Dispose the original stream before replacing it with the resized image.
            await st.DisposeAsync();
            st = (await ImageModule.ResizeAsync(bytes, new Size(128, 128))).Item1;
        }
        var emote = await ctx.Guild.CreateEmojiAsync(name, st, reason: "Added via silverbot by " + ctx.User.Username);
        await ctx.RespondAsync(emote.ToString());
    }
    finally
    {
        await st.DisposeAsync();
    }
}
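// The resize helper is not shown in these examples. Below is a minimal sketch of what an
// ImageModule.ResizeAsync along these lines could look like, assuming the SixLabors.ImageSharp
// package; the class name and the (stream, format) tuple layout are assumptions based on the
// .Item1 access in AddEmote, not the actual implementation.
using System.IO;
using System.Threading.Tasks;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.Formats;
using SixLabors.ImageSharp.Formats.Png;
using SixLabors.ImageSharp.Processing;

public static class ImageModuleSketch
{
    public static async Task<(MemoryStream, IImageFormat)> ResizeAsync(byte[] bytes, Size size)
    {
        using var image = Image.Load(bytes);
        // Resize in place; ResizeMode.Max keeps the aspect ratio within the bounding box.
        image.Mutate(x => x.Resize(new ResizeOptions { Size = size, Mode = ResizeMode.Max }));
        var output = new MemoryStream();
        await image.SaveAsPngAsync(output);
        output.Position = 0; // rewind so the caller can upload from the start
        return (output, PngFormat.Instance);
    }
}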
private void UpdatePlanProgress(Commands.GuiReportPlanProgress progress)
{
    this.lblFilesTransferred.Text = string.Format("{0} of {1} ({2} / {3})",
        progress.Completed, progress.Total,
        FileSizeUtils.FileSizeToString(progress.BytesCompleted),
        FileSizeUtils.FileSizeToString(progress.BytesTotal));
}
public Item(DiscElement discElement)
{
    Name = discElement.GetName();
    Extension = discElement.GetExtension();
    Size = FileSizeUtils.ByteToHumanReadable(discElement.GetSize());
    Date = discElement.CreationDate.ToString();
    Path = discElement.path;
}
protected override void BuildRequestBody()
{
    // Sync
    RequestBody.Add("FileCount", Report.SyncResults.Stats.FileCount);
    RequestBody.Add("SavedFileCount", Report.SyncResults.Stats.SavedFileCount);
    // Sizes
    RequestBody.Add("TotalSize", FileSizeUtils.FileSizeToString(Report.SyncResults.Stats.TotalSize));
}
public static void Main()
{
    var size = 500_000_000_000;
    var dps = 3;
    Console.WriteLine(FileSizeUtils.ToFileSizeString(size, dps));
    Console.WriteLine(FileSizeUtils.ToFileSizeString(size, FileSizeMode.Binary, dps));
    Console.WriteLine(FileSizeUtils.ToFileSizeString(size, FileSizeMode.SI, dps));
}
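// FileSizeUtils itself is not included in these examples. The sketch below is one possible
// implementation of a ToFileSizeString-style formatter with binary (1024-based) and SI
// (1000-based) modes; the enum and method names mirror the call sites above, but the actual
// library may behave differently.
using System;

public enum FileSizeMode { Binary, SI }

public static class FileSizeFormatterSketch
{
    private static readonly string[] BinaryUnits = { "B", "KiB", "MiB", "GiB", "TiB", "PiB" };
    private static readonly string[] SiUnits = { "B", "kB", "MB", "GB", "TB", "PB" };

    public static string ToFileSizeString(long bytes, FileSizeMode mode, int decimalPlaces)
    {
        double step = mode == FileSizeMode.Binary ? 1024d : 1000d;
        string[] units = mode == FileSizeMode.Binary ? BinaryUnits : SiUnits;
        double value = bytes;
        int unit = 0;
        // Divide down until the value fits under one step or we run out of units.
        while (value >= step && unit < units.Length - 1)
        {
            value /= step;
            unit++;
        }
        return string.Format("{0} {1}", Math.Round(value, decimalPlaces), units[unit]);
    }

    // Overload matching the two-argument call site; binary units are assumed as the default.
    public static string ToFileSizeString(long bytes, int decimalPlaces)
    {
        return ToFileSizeString(bytes, FileSizeMode.Binary, decimalPlaces);
    }
}

// With this sketch, 500_000_000_000 bytes formats as "465.661 GiB" in Binary mode
// and "500 GB" in SI mode when three decimal places are requested.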
protected override void BuildRequestBody()
{
    // Status
    RequestBody.Add("Status", Report.OperationStatus.ToString());
    // Transfers
    RequestBody.Add("Total", Report.TransferResults.Stats.Total);
    RequestBody.Add("Pending", Report.TransferResults.Stats.Pending);
    RequestBody.Add("Running", Report.TransferResults.Stats.Running);
    RequestBody.Add("Failed", Report.VersionerResults.Stats.Failed + Report.TransferResults.Stats.Failed);
    RequestBody.Add("Canceled", Report.TransferResults.Stats.Canceled);
    RequestBody.Add("Completed", Report.TransferResults.Stats.Completed);
    // Sizes
    RequestBody.Add("TotalSize", FileSizeUtils.FileSizeToString(Report.TransferResults.Stats.BytesTotal));
    RequestBody.Add("PendingSize", FileSizeUtils.FileSizeToString(Report.TransferResults.Stats.BytesPending));
    RequestBody.Add("FailedSize", FileSizeUtils.FileSizeToString(Report.TransferResults.Stats.BytesFailed));
    RequestBody.Add("CanceledSize", FileSizeUtils.FileSizeToString(Report.TransferResults.Stats.BytesCanceled));
    RequestBody.Add("CompletedSize", FileSizeUtils.FileSizeToString(Report.TransferResults.Stats.BytesCompleted));
}
private void UpdatePlanInfo(Commands.GuiReportPlanStatus report)
{
    CurrentOperation.Status = report.Status;

    switch (report.Status)
    {
        default: return; // TODO(jweyrich): Somehow report unexpected status?
        case Commands.OperationStatus.NOT_RUNNING:
        case Commands.OperationStatus.INTERRUPTED:
        {
            Models.BackupPlan plan = Model as Models.BackupPlan;
            //this.lblSources.Text = report.Sources;
            this.llblRunNow.Text = report.Status == Commands.OperationStatus.NOT_RUNNING ? LBL_RUNNOW_STOPPED : LBL_RUNNOW_RESUME;
            this.llblRunNow.Enabled = true;
            this.lblStatus.Text = report.Status == Commands.OperationStatus.NOT_RUNNING ? LBL_STATUS_STOPPED : LBL_STATUS_INTERRUPTED;
            this.lblDuration.Text = LBL_DURATION_INITIAL;
            this.lblFilesTransferred.Text = LBL_FILES_TRANSFER_STOPPED;
            this.llblEditPlan.Enabled = true;
            this.llblDeletePlan.Enabled = true;
            this.llblRestore.Enabled = true;
            this.lblLastRun.Text = PlanCommon.Format(CurrentOperation.LastRunAt);
            this.lblLastSuccessfulRun.Text = PlanCommon.Format(CurrentOperation.LastSuccessfulRunAt);
            //this.lblTitle.Text = PlanCommon.FormatTitle(plan.Name);
            //this.lblSchedule.Text = plan.ScheduleType.ToString();
            break;
        }
        case Commands.OperationStatus.STARTED:
        case Commands.OperationStatus.RESUMED:
        {
            Models.BackupPlan plan = Model as Models.BackupPlan;
            CurrentOperation.StartedAt = report.StartedAt;
            CurrentOperation.LastRunAt = report.LastRunAt;
            CurrentOperation.LastSuccessfulRunAt = report.LastSuccessfulRunAt;
            this.lblSources.Text = plan.SelectedSourcesAsDelimitedString(", ", 50, "..."); // Duplicate from BackupOperation.cs - Sources property
            this.llblRunNow.Text = LBL_RUNNOW_RUNNING;
            this.llblRunNow.Enabled = true;
            this.lblStatus.Text = LBL_STATUS_STARTED;
            this.lblDuration.Text = LBL_DURATION_STARTED;
            this.lblFilesTransferred.Text = string.Format("{0} of {1} ({2} / {3})",
                0, 0, FileSizeUtils.FileSizeToString(0), FileSizeUtils.FileSizeToString(0));
            this.llblEditPlan.Enabled = false;
            this.llblDeletePlan.Enabled = false;
            this.llblRestore.Enabled = false;
            this.lblLastRun.Text = PlanCommon.Format(CurrentOperation.LastRunAt);
            this.lblLastSuccessfulRun.Text = PlanCommon.Format(CurrentOperation.LastSuccessfulRunAt);
            //this.lblTitle.Text = PlanCommon.FormatTitle(plan.Name);
            //this.lblSchedule.Text = plan.ScheduleType.ToString();
            CurrentOperation.GotInitialInfo = true;
            CurrentOperation.StartTimer();
            break;
        }
        case Commands.OperationStatus.SCANNING_FILES_STARTED:
        {
            this.lblSources.Text = "Scanning files...";
            break;
        }
        case Commands.OperationStatus.SCANNING_FILES_FINISHED:
        {
            break;
        }
        case Commands.OperationStatus.PROCESSING_FILES_STARTED:
        {
            this.lblSources.Text = "Processing files...";
            this.llblRunNow.Text = LBL_RUNNOW_RUNNING;
            this.llblRunNow.Enabled = true;
            this.lblStatus.Text = LBL_STATUS_STARTED;
            this.llblEditPlan.Enabled = false;
            this.llblDeletePlan.Enabled = false;
            break;
        }
        case Commands.OperationStatus.PROCESSING_FILES_FINISHED:
        {
            this.lblSources.Text = report.Sources;
            this.llblRunNow.Text = LBL_RUNNOW_RUNNING;
            this.llblRunNow.Enabled = true;
            this.lblStatus.Text = LBL_STATUS_STARTED;
            this.llblEditPlan.Enabled = false;
            this.llblDeletePlan.Enabled = false;
            //this.lblFilesTransferred.Text = string.Format("{0} of {1} ({2} / {3})",
            //    progress.Completed, progress.Total,
            //    FileSizeUtils.FileSizeToString(progress.BytesCompleted),
            //    FileSizeUtils.FileSizeToString(progress.BytesTotal));
            break;
        }
        case Commands.OperationStatus.UPDATED:
        {
            // Should be handled by another command.
            break;
        }
        case Commands.OperationStatus.FINISHED:
        {
            CurrentOperation.FinishedAt = report.FinishedAt;
            CurrentOperation.LastRunAt = report.LastRunAt;
            CurrentOperation.LastSuccessfulRunAt = report.LastSuccessfulRunAt;
            UpdateDuration(report.Status);
            this.lblSources.Text = report.Sources;
            this.llblRunNow.Text = LBL_RUNNOW_STOPPED;
            this.llblRunNow.Enabled = true;
            this.lblStatus.Text = LBL_STATUS_COMPLETED;
            //this.lblDuration.Text = LBL_DURATION_INITIAL;
            //this.lblFilesTransferred.Text = LBL_FILES_TRANSFER_STOPPED;
            this.llblEditPlan.Enabled = true;
            this.llblDeletePlan.Enabled = true;
            this.llblRestore.Enabled = true;
            this.lblLastRun.Text = PlanCommon.Format(CurrentOperation.LastRunAt);
            this.lblLastSuccessfulRun.Text = PlanCommon.Format(CurrentOperation.LastSuccessfulRunAt);
            //this.lblTitle.Text = PlanCommon.FormatTitle(plan.Name);
            //this.lblSchedule.Text = plan.ScheduleType.ToString();
            CurrentOperation.Reset();
            break;
        }
        case Commands.OperationStatus.FAILED:
        case Commands.OperationStatus.CANCELED:
        {
            CurrentOperation.FinishedAt = report.LastRunAt;
            CurrentOperation.LastRunAt = report.LastRunAt;
            UpdateDuration(report.Status);
            this.lblSources.Text = report.Sources;
            this.llblRunNow.Text = LBL_RUNNOW_STOPPED;
            this.llblRunNow.Enabled = true;
            this.lblStatus.Text = report.Status == Commands.OperationStatus.CANCELED ? LBL_STATUS_CANCELED : LBL_STATUS_FAILED;
            //this.lblDuration.Text = LBL_DURATION_INITIAL;
            //this.lblFilesTransferred.Text = LBL_FILES_TRANSFER_STOPPED;
            this.llblEditPlan.Enabled = true;
            this.llblDeletePlan.Enabled = true;
            this.llblRestore.Enabled = true;
            this.lblLastRun.Text = PlanCommon.Format(CurrentOperation.LastRunAt);
            //this.lblLastSuccessfulRun.Text = PlanCommon.Format(CurrentOperation.LastSuccessfulRunAt);
            //this.lblTitle.Text = PlanCommon.FormatTitle(plan.Name);
            //this.lblSchedule.Text = plan.ScheduleType.ToString();
            CurrentOperation.Reset();
            break;
        }
    }
}
private void UpdateStatsInfo(SyncOperationStatus status, bool runningRemotely = false)
{
    if (RunningOperation == null)
    {
        return;
    }

    switch (status)
    {
        default: throw new ArgumentException("Unhandled status", "status");
        case SyncOperationStatus.Unknown:
        {
            this.lblRemoteDirectory.Text = RunningOperation.RemoteRootDirectory;
            this.lblStatus.Text = LBL_STATUS_STOPPED;
            this.llblRunNow.Text = LBL_RUNNOW_STOPPED;
            this.lblTotalFiles.Text = LBL_TOTALFILES_STOPPED;
            this.lblFilesSynced.Text = LBL_FILESSYNCED_STOPPED;
            this.lblDuration.Text = LBL_DURATION_INITIAL;
            this.btnPrevious.Enabled = false;
            this.btnNext.Enabled = false;
            this.btnFinish.Enabled = false;
            break;
        }
        case SyncOperationStatus.Started:
        //case SyncOperationStatus.Resumed:
        {
            Assert.IsNotNull(OperationResults);
            this.lblRemoteDirectory.Text = RunningOperation.RemoteRootDirectory;
            this.llblRunNow.Text = LBL_RUNNOW_RUNNING;
            this.lblStatus.Text = LBL_STATUS_STARTED;
            this.lblDuration.Text = LBL_DURATION_STARTED;
            this.lblTotalFiles.Text = LBL_TOTALFILES_STARTED;
            this.lblFilesSynced.Text = LBL_FILESSYNCED_STARTED;
            timer1.Enabled = true;
            timer1.Start();
            break;
        }
        case SyncOperationStatus.ListingUpdated:
        {
            long totalSize = OperationResults.Stats.TotalSize;
            string totalSizeAsStr = totalSize == 0 ? "Completed" : FileSizeUtils.FileSizeToString(totalSize);
            this.lblTotalFiles.Text = string.Format("{0} files ({1})", OperationResults.Stats.FileCount, totalSizeAsStr);
            break;
        }
        case SyncOperationStatus.SavingUpdated:
        {
            Dispatcher.Invoke(() =>
            {
                this.lblFilesSynced.Text = string.Format("{0} of {1}",
                    OperationResults.Stats.SavedFileCount, OperationResults.Stats.FileCount);
            });
            break;
        }
        case SyncOperationStatus.Canceled:
        case SyncOperationStatus.Failed:
        {
            UpdateDuration(status);
            this.lblRemoteDirectory.Text = RunningOperation.RemoteRootDirectory;
            this.llblRunNow.Text = LBL_RUNNOW_STOPPED;
            this.lblStatus.Text = status == SyncOperationStatus.Canceled ? LBL_STATUS_CANCELED : LBL_STATUS_FAILED;
            this.lblFilesSynced.Text = LBL_FILESSYNCED_STOPPED;
            timer1.Stop();
            timer1.Enabled = false;
            this.btnPrevious.Enabled = true;
            this.btnNext.Enabled = true;
            this.btnFinish.Enabled = true;
            if (!runningRemotely)
            {
                // Update timestamps.
                Models.Synchronization sync = Model as Models.Synchronization;
                //sync.LastRunAt = DateTime.UtcNow;
                _daoSynchronization.Update(sync);
            }
            break;
        }
        case SyncOperationStatus.Finished:
        {
            UpdateDuration(status);
            this.lblRemoteDirectory.Text = RunningOperation.RemoteRootDirectory;
            this.llblRunNow.Text = LBL_RUNNOW_STOPPED;
            this.lblStatus.Text = LBL_STATUS_COMPLETED;
            timer1.Stop();
            timer1.Enabled = false;
            this.btnPrevious.Enabled = true;
            this.btnNext.Enabled = true;
            this.btnFinish.Enabled = true;
            if (!runningRemotely)
            {
                // Update timestamps.
                Models.Synchronization sync = Model as Models.Synchronization;
                //sync.LastRunAt = sync.LastSuccessfulRunAt = DateTime.UtcNow;
                _daoSynchronization.Update(sync);
            }
            break;
        }
    }
}
protected async void DoRestore(CustomRestoreAgent agent, Models.Restore restore, RestoreOperationOptions options)
{
    try
    {
        CurrentState = RestoreOperationState.STARTING;
        OnStart(agent, restore);

        // Mount all network mappings and abort if there is any network mapping failure.
        CurrentState = RestoreOperationState.MAPPING_NETWORK_DRIVES;
        Helper.MountAllNetworkDrives();

        // Execute pre-actions
        CurrentState = RestoreOperationState.EXECUTING_PRE_ACTIONS;
        Helper.ExecutePreActions();

        //
        // Scanning
        //
        CurrentState = RestoreOperationState.SCANNING_FILES;
        LinkedList<CustomVersionedFile> filesToProcess = null;
        {
            Task<PathScanResults<CustomVersionedFile>> filesToProcessTask = GetFilesToProcess(restore);
            {
                var message = "Scanning files started.";
                Info(message);
                //StatusInfo.Update(BackupStatusLevel.INFO, message);
                OnUpdate(new RestoreOperationEvent { Status = RestoreOperationStatus.ScanningFilesStarted, Message = message });
            }
            try
            {
                await filesToProcessTask;
            }
            catch (Exception ex)
            {
                if (ex.IsCancellation())
                {
                    string message = "Scanning files was canceled.";
                    Report.AddErrorMessage(message);
                    logger.Warn(message);
                }
                else
                {
                    string message = string.Format("Caught exception during scanning files: {0}", ex.Message);
                    Report.AddErrorMessage(message);
                    logger.Log(LogLevel.Error, ex, message);
                }
                if (filesToProcessTask.IsFaulted || filesToProcessTask.IsCanceled)
                {
                    if (filesToProcessTask.IsCanceled)
                    {
                        OnCancelation(agent, restore, ex); // filesToProcessTask.Exception
                    }
                    else
                    {
                        OnFailure(agent, restore, ex); // filesToProcessTask.Exception
                    }
                    return;
                }
            }
            filesToProcess = filesToProcessTask.Result.Files;
            {
                foreach (var entry in filesToProcessTask.Result.FailedFiles)
                {
                    Report.AddErrorMessage(entry.Value);
                }
                if (filesToProcessTask.Result.FailedFiles.Count > 0)
                {
                    StringBuilder sb = new StringBuilder();
                    sb.AppendLine("Scanning failed for the following drives/files/directories:");
                    foreach (var entry in filesToProcessTask.Result.FailedFiles)
                    {
                        sb.AppendLine(string.Format(" Path: {0} - Reason: {1}", entry.Key, entry.Value));
                    }
                    Warn(sb.ToString());
                }
                var message = "Scanning files finished.";
                Info(message);
                //StatusInfo.Update(BackupStatusLevel.INFO, message);
                OnUpdate(new RestoreOperationEvent { Status = RestoreOperationStatus.ScanningFilesFinished, Message = message });
            }
        }

        //
        // Versioning
        //
        CurrentState = RestoreOperationState.VERSIONING_FILES;
        {
            Task versionerTask = DoVersionFiles(restore, filesToProcess);
            {
                var message = "Processing files started.";
                Info(message);
                //StatusInfo.Update(RestoreStatusLevel.INFO, message);
                OnUpdate(new RestoreOperationEvent { Status = RestoreOperationStatus.ProcessingFilesStarted, Message = message });
            }
            try
            {
                await versionerTask;
            }
            catch (Exception ex)
            {
                if (ex.IsCancellation())
                {
                    string message = "Processing files was canceled.";
                    Report.AddErrorMessage(message);
                    logger.Warn(message);
                }
                else
                {
                    string message = string.Format("Caught exception during processing files: {0}", ex.Message);
                    Report.AddErrorMessage(message);
                    logger.Log(LogLevel.Error, ex, message);
                }
                if (versionerTask.IsFaulted || versionerTask.IsCanceled)
                {
                    Versioner.Undo();
                    if (versionerTask.IsCanceled)
                    {
                        OnCancelation(agent, restore, ex); // versionerTask.Exception
                    }
                    else
                    {
                        OnFailure(agent, restore, ex); // versionerTask.Exception
                    }
                    return;
                }
            }
            agent.Files = Versioner.FilesToTransfer;
            {
                var message = "Processing files finished.";
                Info(message);
                //StatusInfo.Update(BackupStatusLevel.INFO, message);
                OnUpdate(new RestoreOperationEvent { Status = RestoreOperationStatus.ProcessingFilesFinished, Message = message });
            }
            {
                agent.Results.Stats.BytesTotal = agent.EstimatedTransferSize;
                var message = string.Format("Estimated restore size: {0} files, {1}",
                    agent.Files.Count(), FileSizeUtils.FileSizeToString(agent.EstimatedTransferSize));
                Info(message);
            }
        }

        //
        // Transfer files
        //
        CurrentState = RestoreOperationState.TRANSFERRING_FILES;
        {
            Task<TransferResults> transferTask = agent.Start();
            {
                var message = "Transfer files started.";
                Info(message);
            }
            try
            {
                await transferTask;
            }
            catch (Exception ex)
            {
                if (ex.IsCancellation())
                {
                    string message = "Transfer files was canceled.";
                    Report.AddErrorMessage(message);
                    logger.Warn(message);
                }
                else
                {
                    string message = string.Format("Caught exception during transfer files: {0}", ex.Message);
                    Report.AddErrorMessage(message);
                    logger.Log(LogLevel.Error, ex, message);
                }
                if (transferTask.IsFaulted || transferTask.IsCanceled)
                {
                    if (transferTask.IsCanceled)
                    {
                        OnCancelation(agent, restore, ex); // transferTask.Exception
                    }
                    else
                    {
                        OnFailure(agent, restore, ex); // transferTask.Exception
                    }
                    return;
                }
            }
            // Read the results only after the transfer has completed; touching
            // transferTask.Result before awaiting would block the calling thread.
            Report.TransferResults = transferTask.Result;
            {
                var message = "Transfer files finished.";
                Info(message);
            }
        }

        CurrentState = RestoreOperationState.EXECUTING_POST_ACTIONS;
        Helper.ExecutePostActions(Report.TransferResults);

        CurrentState = RestoreOperationState.FINISHING;
        OnFinish(agent, restore);
    }
    catch (Exception ex)
    {
        OnFinish(agent, restore, ex);
    }
}
private async void DoRun()
{
    _IsRunning = true;

    SaveSettings();

    AsyncHelper.SettingsMaxThreadCount = Decimal.ToInt32(nudParallelism.Value);

    if (_TransferAgent != null)
    {
        _TransferAgent.Dispose();
    }

    transferListControl1.ClearTransfers();

    AWSCredentials awsCredentials = new BasicAWSCredentials(tbAccessKey.Text, tbSecretKey.Text);
    TransferAgentOptions options = new TransferAgentOptions();

    if (CancelTokenSource != null)
    {
        CancelTokenSource.Dispose();
    }
    CancelTokenSource = new CancellationTokenSource();

    _TransferAgent = new S3TransferAgent(options, awsCredentials, tbBucketName.Text, CancelTokenSource.Token);
    _TransferAgent.RemoteRootDir = "backup-99";

    switch (_Operation)
    {
        case OperationType.BACKUP:
        {
            _BackupAgent = new CustomBackupAgent(_TransferAgent);
            _BackupAgent.Results.Monitor = transferListControl1;
            /*
             * _BackupAgent.Results.Failed += (object sender, TransferFileProgressArgs args, Exception e) =>
             * {
             *     Warn("Failed {0}", args.FilePath);
             * };
             * _BackupAgent.Results.Canceled += (object sender, TransferFileProgressArgs args, Exception e) =>
             * {
             *     Warn("Canceled {0}", args.FilePath);
             * };
             * _BackupAgent.Results.Completed += (object sender, TransferFileProgressArgs args) =>
             * {
             *     Info("Completed {0}", args.FilePath);
             * };
             * _BackupAgent.Results.Started += (object sender, TransferFileProgressArgs args) =>
             * {
             *     Info("Started {0}", args.FilePath);
             * };
             * _BackupAgent.Results.Progress += (object sender, TransferFileProgressArgs args) =>
             * {
             *     Info("Progress {0}% {1} ({2}/{3} bytes)",
             *         args.PercentDone, args.FilePath, args.TransferredBytes, args.TotalBytes);
             * };
             */

            LinkedList<CustomVersionedFile> sources = new LinkedList<CustomVersionedFile>();

            if (cbSimulateFailure.Checked)
            {
                sources.AddLast(new CustomVersionedFile(@"C:\pagefile.sys"));
            }

            DirectoryInfo dir = new DirectoryInfo(txtSourceDirectory.Text);
            if (dir != null)
            {
                foreach (FileInfo file in dir.GetFiles())
                {
                    sources.AddLast(new CustomVersionedFile(file.FullName));
                }
            }

            _BackupAgent.Files = sources;

            Info("Estimated backup size: {0} files, {1}",
                _BackupAgent.Results.Stats.Total,
                FileSizeUtils.FileSizeToString(_BackupAgent.EstimatedTransferSize));

            Task task = _BackupAgent.Start();
            try
            {
                await task;
            }
            catch (Exception ex)
            {
                if (ex.IsCancellation())
                {
                    Info(ex.Message);
                }
                else
                {
                    Error(ex.Message);
                }
            }
            break;
        }
        case OperationType.RESTORE:
        {
            _RestoreAgent = new CustomRestoreAgent(_TransferAgent);
            _RestoreAgent.Results.Monitor = transferListControl1;

            // TODO(jweyrich): These are statically hardcoded for now, but they should be dynamic.
            //                 To make them dynamic we need to execute a Sync operation to discover them first.
            LinkedList<CustomVersionedFile> sources = new LinkedList<CustomVersionedFile>();
            sources.AddLast(new CustomVersionedFile(@"C:\pagefile.sys"));
            sources.AddLast(new CustomVersionedFile(@"C:\Teste\a.txt"));
            sources.AddLast(new CustomVersionedFile(@"C:\Teste\b.txt"));
            sources.AddLast(new CustomVersionedFile(@"C:\Teste\bash.exe"));
            sources.AddLast(new CustomVersionedFile(@"C:\Teste\c.txt"));
            sources.AddLast(new CustomVersionedFile(@"C:\Teste\e.txt"));

            _RestoreAgent.Files = sources;

            Info("Estimated restore size: {0} files, {1}",
                _RestoreAgent.Results.Stats.Total,
                FileSizeUtils.FileSizeToString(_RestoreAgent.EstimatedTransferSize));

            Task task = _RestoreAgent.Start();
            try
            {
                await task;
            }
            catch (Exception ex)
            {
                if (ex.IsCancellation())
                {
                    Info(ex.Message);
                }
                else
                {
                    Error(ex.Message);
                }
            }
            break;
        }
    }

    OnFinish();
}
protected async void DoSynchronization(CustomSynchronizationAgent agent, Models.Synchronization sync, SyncOperationOptions options)
{
    try
    {
        CurrentState = SyncOperationState.STARTING;
        OnStart(agent, sync);

        //
        // Synchronization
        //
        CurrentState = SyncOperationState.SYNCHRONIZING_FILES;
        {
            Task syncTask = agent.Start(TransferAgent.RemoteRootDir, true);
            {
                var message = "Synchronizing files started.";
                Info(message);
                //StatusInfo.Update(SyncStatusLevel.INFO, message);
                OnUpdate(new SyncOperationEvent { Status = SyncOperationStatus.Started, Message = message });
            }
            try
            {
                await syncTask;
            }
            catch (Exception ex)
            {
                if (ex.IsCancellation())
                {
                    logger.Warn("Synchronizing files was canceled.");
                }
                else
                {
                    logger.Log(LogLevel.Error, ex, "Caught exception during synchronizing files");
                }
                if (syncTask.IsFaulted || syncTask.IsCanceled)
                {
                    if (syncTask.IsCanceled)
                    {
                        OnCancelation(agent, sync, ex); // syncTask.Exception
                    }
                    else
                    {
                        OnFailure(agent, sync, ex); // syncTask.Exception
                    }
                    return;
                }
            }
            {
                var message = "Synchronizing files finished.";
                Info(message);
                //StatusInfo.Update(SyncStatusLevel.INFO, message);
                //OnUpdate(new SyncOperationEvent { Status = SyncOperationStatus.Finished, Message = message });
            }
            {
                var message = string.Format("Estimated synchronization size: {0} files, {1}",
                    RemoteObjects.Count(), FileSizeUtils.FileSizeToString(agent.Results.Stats.TotalSize));
                Info(message);
            }
        }

        //
        // Database files saving
        //
        CurrentState = SyncOperationState.SAVING_TO_DATABASE;
        {
            Task saveTask = ExecuteOnBackround(() =>
            {
                // Save everything.
                Save(CancellationTokenSource.Token);
            }, CancellationTokenSource.Token);
            {
                var message = "Database files saving started.";
                Info(message);
            }
            try
            {
                await saveTask;
            }
            catch (Exception ex)
            {
                if (ex.IsCancellation())
                {
                    logger.Warn("Database files saving was canceled.");
                }
                else
                {
                    logger.Log(LogLevel.Error, ex, "Caught exception during database files saving");
                }
                if (saveTask.IsFaulted || saveTask.IsCanceled)
                {
                    if (saveTask.IsCanceled)
                    {
                        OnCancelation(agent, sync, ex); // saveTask.Exception
                    }
                    else
                    {
                        OnFailure(agent, sync, ex); // saveTask.Exception
                    }
                    return;
                }
            }
            {
                var message = "Database files saving finished.";
                Info(message);
            }
        }

        CurrentState = SyncOperationState.FINISHING;
        OnFinish(agent, sync);
    }
    catch (Exception ex)
    {
        OnFinish(agent, sync, ex);
    }
}
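// ExecuteOnBackround is a project helper that is not shown in these examples. A minimal sketch
// of what it might look like, assuming it simply wraps Task.Run with the operation's cancellation
// token (the misspelled name is kept exactly as it appears at the call site above, and the
// surrounding class is a hypothetical stand-in):
using System;
using System.Threading;
using System.Threading.Tasks;

public abstract class OperationBaseSketch
{
    protected Task ExecuteOnBackround(Action action, CancellationToken token)
    {
        // Task.Run only checks the token before the delegate starts; the delegate itself
        // receives the token separately (as Save does above) for cooperative cancellation.
        return Task.Run(action, token);
    }
}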
private void btnStart_Click(object sender, EventArgs e)
{
    if (IsRunning)
    {
        return;
    }

    IsRunning = true;

    CancellationTokenSource = new CancellationTokenSource();

    var options = new TransferAgentOptions
    {
        UploadChunkSizeInBytes = 1 * 1024 * 1024,
    };

    string accessKey = txtAccessKey.Text.Trim();
    string secretKey = txtSecretKey.Text.Trim();
    string bucketName = txtBucketName.Text.Trim();
    BasicAWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);

    string localFilePath = txtFilePath.Text;
    bool fileInformed = !string.IsNullOrEmpty(localFilePath);
    bool fileExists = fileInformed && FileManager.FileExists(localFilePath);
    if (!fileInformed || !fileExists)
    {
        string message = "";
        if (!fileInformed)
        {
            message = "You have to inform a file for upload";
        }
        else if (!fileExists)
        {
            message = string.Format("The informed file does not exist: {0}", localFilePath);
        }
        MessageBox.Show(message, "Oops!", MessageBoxButtons.OK, MessageBoxIcon.Error);
        IsRunning = false;
        return;
    }

#if true
    string remoteFilePath = typeof(UploadPerfTestControl).Name + ".DELETE_ME";
#else
    S3PathBuilder pathBuilder = new S3PathBuilder();
    string remoteFilePath = pathBuilder.BuildRemotePath(localFilePath);
#endif

    long fileSize = FileManager.UnsafeGetFileSize(localFilePath);

    BlockPerfStats stats = new BlockPerfStats();

    S3TransferAgent xferAgent = new S3TransferAgent(options, credentials, bucketName, CancellationTokenSource.Token);
    xferAgent.UploadFileStarted += (object sender1, TransferFileProgressArgs e1) =>
    {
        stats.Begin();
    };
    xferAgent.UploadFileCanceled += (object sender1, TransferFileProgressArgs e1) =>
    {
        stats.End();
        string message = "Canceled file upload";
        MessageBox.Show(message, "Transfer canceled", MessageBoxButtons.OK, MessageBoxIcon.Information);
    };
    xferAgent.UploadFileFailed += (object sender1, TransferFileProgressArgs e1) =>
    {
        stats.End();
        string message = string.Format("Failed to upload file: {0}\n{1}", e1.Exception.GetType().Name, e1.Exception.Message);
        MessageBox.Show(message, "Transfer failed", MessageBoxButtons.OK, MessageBoxIcon.Error);
    };
    xferAgent.UploadFileCompleted += (object sender1, TransferFileProgressArgs e1) =>
    {
        stats.End();
        string message = string.Format(
            "Took {0} to upload {1}",
            TimeSpanUtils.GetReadableTimespan(stats.Duration),
            FileSizeUtils.FileSizeToString(fileSize)
        );
        MessageBox.Show(message, "Transfer completed", MessageBoxButtons.OK, MessageBoxIcon.Information);
    };
    //xferAgent.UploadFileProgress += (object sender1, TransferFileProgressArgs e1) =>
    //{
    //    // ...
    //};

    xferAgent.UploadFile(localFilePath, remoteFilePath, null);

    IsRunning = false;
}
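// BlockPerfStats is a small timing helper used above but not shown in these examples. A minimal
// sketch of a Stopwatch-based equivalent, assuming only the Begin/End/Duration members exercised
// by the upload handlers:
using System;
using System.Diagnostics;

public class BlockPerfStatsSketch
{
    private readonly Stopwatch _stopwatch = new Stopwatch();

    // Duration of the measured block; meaningful after End() has been called.
    public TimeSpan Duration { get; private set; }

    public void Begin()
    {
        Duration = TimeSpan.Zero;
        _stopwatch.Restart();
    }

    public void End()
    {
        _stopwatch.Stop();
        Duration = _stopwatch.Elapsed;
    }
}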
/// <summary>
/// Iterate through all prescribed directories and look to optimize space by either deleting or
/// archiving/zipping files.
/// </summary>
/// <param name="healthCheckParameters">Parameters that include the size of a file before archiving and the directories to work on.</param>
/// <returns>A HealthMonitorResult object.</returns>
public override HealthMonitorResult DoHealthCheck(IEnumerable<BoundsLimit> healthCheckParameters)
{
    const string maxDays = "maxDays";
    const string maxSize = "maxSize";

    HealthMonitorResult result = new HealthMonitorResult(Name, HealthType, ResultStatus.Information);
    string[] spaceOptimizeCheckParams = new[] { maxDays, maxSize };
    BoundsLimit[] checkParameters = healthCheckParameters as BoundsLimit[] ?? healthCheckParameters.ToArray();

    EnsureAllParametersArePresent(spaceOptimizeCheckParams, checkParameters);

    int daysOld = int.Parse(checkParameters.Single(x => x.Name.Equals(maxDays)).Value);
    long fileSize = long.Parse(checkParameters.Single(x => x.Name.Equals(maxSize)).Value);
    spaceOptimizationService.MaxDays = daysOld;
    spaceOptimizationService.MaxSize = fileSize;

    BoundsLimit boundsLimit = checkParameters.FirstOrDefault(x => x.Name.ToLower().Equals(ServiceTimeoutMillisecondsParameter.ToLower()));
    if (boundsLimit != null)
    {
        int serviceTimeoutMilliseconds = int.Parse(boundsLimit.Value);
        ServiceTimeout = TimeSpan.FromMilliseconds(serviceTimeoutMilliseconds);
    }

    // The list of folder(s) to work on are of type 'folder'.
    List<string> directories = checkParameters.Where(x => x.Type.Equals("folder"))
                                              .Select(y => y.Value)
                                              .ToList();

    ServiceController[] services = checkParameters.Where(x => x.Type.Equals("service"))
                                                  .Select(y => new ServiceController(y.Value))
                                                  .ToArray();

    HealthMonitorResult resultToStop = StopServices(services);
    result.MessageBuilder.AppendNewLine(resultToStop.MessageBuilder.ToString());

    List<SpaceOptimizationSummary> optimizations = new List<SpaceOptimizationSummary>();

    // Iterate through directories.
    foreach (string path in directories)
    {
        try
        {
            DirectoryInfo directory = new DirectoryInfo(path);
            archiveService.ArchiveFileName = Path.Combine(path, ZIP_FILE_NAME);
            SpaceOptimizationSummary summary = new SpaceOptimizationSummary(DateTime.Now, new List<FileOptimized>(), path,
                directory.GetCurrentSizeExceptFiles(".zip"));

            result.MessageBuilder.AppendNewLine(DeleteOldArchivedFiles(summary, daysOld));
            result.MessageBuilder.AppendNewLine(DeleteOldArchivedFiles(summary, path));

            summary.CurrentSize = directory.GetCurrentSizeExceptFiles(".zip");
            result.MessageBuilder.AppendNewLine(summary.ToString());
            optimizations.Add(summary);
        }
        catch (Exception e)
        {
            result.MessageBuilder.AppendNewLine(e.ToLogString());
            result.Status = ResultStatus.Warning;
        }
    }

    HealthMonitorResult resultToStart = StartServices(services);
    result.MessageBuilder.AppendNewLine(resultToStart.MessageBuilder.ToString());

    string spaceSavedMessage = string.Format(SystemConstants.TOTAL_FILE_SIZE_MSG,
        FileSizeUtils.FileSizeDescription(optimizations.Sum(x => x.SpaceSaved)));
    result.MessageBuilder.AppendNewLine(spaceSavedMessage);

    return result;
}
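// GetCurrentSizeExceptFiles is an extension method used above but not included in these examples.
// A minimal sketch of one way it could be implemented, assuming it sums the sizes of all files
// under the directory except those with the given extension (e.g. ".zip"):
using System;
using System.IO;
using System.Linq;

public static class DirectoryInfoExtensionsSketch
{
    public static long GetCurrentSizeExceptFiles(this DirectoryInfo directory, string excludedExtension)
    {
        // Recursively sum file lengths, skipping files whose extension matches the exclusion.
        return directory.EnumerateFiles("*", SearchOption.AllDirectories)
                        .Where(f => !f.Extension.Equals(excludedExtension, StringComparison.OrdinalIgnoreCase))
                        .Sum(f => f.Length);
    }
}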