/// <summary>
/// Server-side copies the base VHD blob to the destination blob, polling the copy
/// state once per second and reporting byte-level progress until it completes.
/// Throws if the copy ends in any state other than Success.
/// </summary>
private void CopyBaseImageToDestination()
{
	// Fetch the source attributes first so the total length is known for progress.
	var source = this.blobObjectFactory.Create(baseVhdBlobUri);
	source.FetchAttributes();

	var progress = new ProgressStatus(0, source.Properties.Length);

	using (new ProgressTracker(progress, Program.SyncOutput.ProgressCopyStatus, Program.SyncOutput.ProgressCopyComplete, TimeSpan.FromSeconds(1)))
	{
		destinationBlob.StartCopyFromBlob(source);
		destinationBlob.FetchAttributes();

		while (true)
		{
			// Record only the bytes copied since the previous poll.
			if (destinationBlob.CopyState.BytesCopied != null)
			{
				progress.AddToProcessedBytes(destinationBlob.CopyState.BytesCopied.Value - progress.BytesProcessed);
			}

			if (destinationBlob.CopyState.Status == CopyStatus.Success)
			{
				break;
			}

			if (destinationBlob.CopyState.Status != CopyStatus.Pending)
			{
				// Aborted/failed copies are surfaced to the caller.
				throw new ApplicationException(
					string.Format("Cannot copy source '{0}' to destination '{1}', copy state is '{2}'",
						source.Uri, destinationBlob.Uri, destinationBlob.CopyState));
			}

			Thread.Sleep(TimeSpan.FromSeconds(1));
			destinationBlob.FetchAttributes();
		}
	}
}
/// <summary>
/// Initializes a new progress record: state Running, zero progress, and an
/// elapsed time of NaN until the operation finishes.
/// </summary>
public Progress()
{
	this.start = DateTime.Now;          // wall-clock start for elapsed-time calculation
	this.progress = 0;
	this.status = ProgressStatus.Running;
	this.TimeMilliseconds = double.NaN; // NaN marks "not finished yet"
}
/// <summary>
/// Tags the file with basic filesystem metadata: last write time,
/// extension and size in bytes.
/// </summary>
public void Process(MGFile file, ProgressStatus reporter)
{
	var info = new FileInfo(file.FileName);

	file.Tags.Set("LastWriteTime", info.LastWriteTime);
	file.Tags.Set("Extension", info.Extension);
	file.Tags.Set("FileSizeBytes", info.Length);
}
/// <summary>
/// Reads container/codec metadata for the file via MediaInfo and stores it as tags,
/// including pre-computed device-compatibility flags. Only the first audio and
/// first video stream are inspected.
/// </summary>
public void Process(MGFile file, ProgressStatus reporter)
{
	FileMetadata metadata = m_MediaInfo.ReadFile(file.FileName);
	file.Tags.Set(TVShowDataStoreTemplate.Format, metadata.Format);

	if (metadata.AudioStreams.Any())
	{
		AudioStreamInfo audio = metadata.AudioStreams.First();
		file.Tags.Set(TVShowDataStoreTemplate.AudioCodec, audio.CodecString);
	}

	if (metadata.VideoStreams.Any())
	{
		VideoStreamInfo video = metadata.VideoStreams.First();

		file.Tags.Set(TVShowDataStoreTemplate.VideoCodec, video.CodecString);
		file.Tags.Set(TVShowDataStoreTemplate.VideoCodecProfile, video.CodecProfile);
		file.Tags.Set(TVShowDataStoreTemplate.Resolution, string.Format("{0}x{1}", video.WidthPx, video.HeightPx));
		file.Tags.Set(TVShowDataStoreTemplate.VideoWidthPx, video.WidthPx);
		file.Tags.Set(TVShowDataStoreTemplate.VideoHeightPx, video.HeightPx);
		file.Tags.Set(TVShowDataStoreTemplate.PlayTime, video.PlayTime);
		file.Tags.Set(TVShowDataStoreTemplate.VideoDisplayAspectRatio, video.DisplayAspectRatio);
		file.Tags.Set(TVShowDataStoreTemplate.FrameCount, video.FrameCount);
		file.Tags.Set(TVShowDataStoreTemplate.FrameRate, video.FrameRate);
	}

	// Compatibility flags for the supported playback devices.
	file.Tags.Set(TVShowDataStoreTemplate.iPod5GCompatible, metadata.IsCompatible(DeviceType.iPod5G));
	file.Tags.Set(TVShowDataStoreTemplate.iPodClassicCompatible, metadata.IsCompatible(DeviceType.iPodClassic));
	file.Tags.Set(TVShowDataStoreTemplate.iPhoneCompatible, metadata.IsCompatible(DeviceType.iPhone));
}
/// <summary>
/// Marshals a sync-progress update onto the UI thread and refreshes the
/// progress bar and its "{done} of {total}" label.
/// </summary>
private void Progressable_ProgressChanged(object sender, ProgressStatus e)
{
	Dispatcher.Invoke(() =>
	{
		syncProgress.Value = e.DoneCount;
		syncProgress.Maximum = e.TotalCount;
		// "{0} из {1}" is Russian for "{0} of {1}"; the literal is user-facing and kept as-is.
		syncProgressValue.Text = string.Format("{0} из {1}", e.DoneCount, e.TotalCount);
	});
}
/// <summary>
/// Creates a KissManga series entry, normalizing the name for filesystem use and
/// deriving the search text and on-disk folder path.
/// </summary>
public KissMangaSeries(string siteUri, string name, string tag, MangaSite site, ProgressStatus status)
{
	this.SiteUri = siteUri;
	this.Tag = tag;
	// Strip characters that are invalid in folder names before using Name as a path segment.
	this.Name = name.Trim().RemoveInvalidChars();
	this.FullText = this.Name + " " + this.Tag;
	this.HomeSite = site;
	this.Status = (byte)status;
	this.SeriesFolderPath = @"Sites\" + this.HomeSite.Name + @"\" + this.Name + @"\";
}
/// <summary>
/// Stops the progress info: a still-running operation is marked Done and the
/// total elapsed time is captured in milliseconds.
/// </summary>
public void Dispose()
{
	var stoppedAt = DateTime.Now;

	// Do not overwrite a terminal state (e.g. an error) that was already set.
	if (status == ProgressStatus.Running)
	{
		status = ProgressStatus.Done;
	}

	this.TimeMilliseconds = (stoppedAt - start).TotalMilliseconds;
}
/// <summary>
/// Creates a BlogTruyen series entry. The search text uses the name with
/// Vietnamese diacritics removed; the folder path is rooted at the site name.
/// </summary>
public BlogTruyenSeries(string siteUri, string name, string tag, MangaSite site, ProgressStatus status)
{
	this.SiteUri = siteUri;
	this.Tag = tag;
	// Strip characters that are invalid in folder names before using Name as a path segment.
	this.Name = name.Trim().RemoveInvalidChars();
	this.FullText = this.Name.RemoveVietnameseSign() + " " + this.Tag;
	this.HomeSite = site;
	this.Status = (byte)status;
	seriesPath = this.HomeSite.Name + "\\" + this.Name + "\\";
}
/// <summary>
/// Creates a Manga24h series entry, setting the inherited fields, the
/// diacritics-free search text, the on-disk folder path and the EF discriminator.
/// </summary>
public Manga24hSeries(string siteUri, string name, string tag, MangaSite site, ProgressStatus status)
{
	base.SiteUri = siteUri;
	base.Tag = tag;
	// Strip characters that are invalid in folder names before using Name as a path segment.
	base.Name = Utility.RemoveInvalidChars(name.Trim());
	this.FullText = base.Name.RemoveVietnameseSign() + " " + base.Tag;
	this.HomeSite = site;
	base.Status = (byte)status;
	base.SeriesFolderPath = @"Sites\" + this.HomeSite.Name + @"\" + base.Name + @"\";
	base.Discriminator = "Manga24hSeries";
}
/// <summary>
/// Wraps a stream so that reads are tracked against the stream's total length,
/// reporting progress at the given interval.
/// </summary>
/// <param name="innerStream">Stream to wrap; must report a Length.</param>
/// <param name="progressInterval">How often the tracker emits progress updates.</param>
public StreamWithReadProgress(Stream innerStream, TimeSpan progressInterval)
{
	this.innerStream = innerStream;
	this.progressInterval = progressInterval;

	// Status must exist before the tracker that reports on it.
	this.readStatus = new ProgressStatus(0, innerStream.Length, new ComputeStats());
	this.progressTracker = new ProgressTracker(
		this.readStatus,
		Program.SyncOutput.ProgressOperationStatus,
		Program.SyncOutput.ProgressOperationComplete,
		progressInterval);
}
/// <summary>
/// Creates a task descriptor and registers it in the global task list.
/// </summary>
public Task(Game _game, MediaType _mediaType, ManagerStatus _managerStatus, ProgressStatus _progressStatus, PresentationType _presentationType, String _mediaName, int _operatorID)
	: base()
{
	this.game = _game;
	this.mediaType = _mediaType;
	this.managerStatus = _managerStatus;
	this.progressStatus = _progressStatus;
	this.presentationType = _presentationType;
	this.mediaName = _mediaName;
	this.operatorID = _operatorID;

	// Side effect: every constructed task is tracked globally.
	AvateeringXNA.AllTasks.Add(this);
}
/// <summary>
/// Populates <see cref="MangaSeries"/> by scraping the site's manga-list pages.
/// When <paramref name="progressStatus"/> is Unknown, all status categories (1 through
/// Suspended) are fetched; otherwise only the single given status. Does nothing if the
/// list is already populated.
/// </summary>
public override void GetSeries(ProgressStatus progressStatus)
{
	if (MangaSeries == null || MangaSeries.Count < 1)
	{
		MangaSeries = new List<MangaSeries>();
		HtmlDocument doc = new HtmlDocument();
		int i = 0;
		int max = 0;
		// The loop variable doubles as the numeric status id embedded in the site URL.
		if (progressStatus == ProgressStatus.Unknown)
		{
			i = 1;
			max = (int)ProgressStatus.Suspended + 1;
		}
		else
		{
			i = (int)progressStatus;
			max = (int)progressStatus + 1;
		}
		for (; i < max; i++)
		{
			doc.LoadHtml(HttpUtility.GetResponseString(MangaListUri + "/" + i));
			// The last pagination link carries the total page count for this status.
			var query = doc.DocumentNode.SelectNodes("//div[@class='row mangalist']//div[@class='pagination'][1]//a");
			if (query != null)
			{
				var node = query.LastOrDefault();
				var hrefValue = node != null ? node.Attributes["href"].Value : "";
				// NOTE(review): an empty hrefValue makes Int32.Parse throw — confirm the
				// pagination block always contains at least one anchor with an href.
				int totalPages = Int32.Parse(hrefValue.Substring(hrefValue.LastIndexOf('/') + 1));
				for (int j = 1; j <= totalPages; j++)
				{
					MangaSeries series = null;
					doc.LoadHtml(HttpUtility.GetResponseString(MangaListUri + "/" + i + "/page/" + j));
					query = doc.DocumentNode.SelectNodes("//div[@class='row mangalist']//div[@class='descr']");
					foreach (var _node in query)
					{
						node = _node.ChildNodes["a"];
						// Title link, tag text and the numeric status id build the series entry.
						series = new KissMangaSeries(SiteUri + "/" + node.Attributes["href"].Value, node.InnerText, _node.ChildNodes[5].ChildNodes[1].InnerText, this, (ProgressStatus)i);
						series.HomeSite = this;
						MangaSeries.Add(series);
					}
				}
			}
		}
	}
}
/// <summary>
/// Uploads all pending data ranges to the page blob in parallel, reporting progress,
/// and on full success stamps the blob's MD5 and clears the upload bookkeeping metadata.
/// </summary>
/// <returns>true if every chunk uploaded; false if any chunk threw.</returns>
public bool Synchronize()
{
	var uploadStatus = new ProgressStatus(alreadyUploadedData, alreadyUploadedData + dataToUpload, new ComputeStats());
	using(new ServicePointHandler(blob.Uri, this.maxParallelism))
	using(new ProgressTracker(uploadStatus))
	{
		// Each worker gets its own CloudPageBlob instance via the thread-local initializer.
		var loopResult = Parallel.ForEach(dataWithRanges, () => new CloudPageBlob(blob.Uri, blob.ServiceClient.Credentials), (dwr, b) =>
		{
			using (dwr)
			{
				// Per-chunk MD5 lets the service validate each page write.
				var md5HashOfDataChunk = GetBase64EncodedMd5Hash(dwr.Data, (int) dwr.Range.Length);
				using (var stream = new MemoryStream(dwr.Data, 0, (int)dwr.Range.Length))
				{
					b.Properties.ContentMD5 = md5HashOfDataChunk;
					b.WritePages(stream, dwr.Range.StartIndex);
				}
			}
			uploadStatus.AddToProcessedBytes((int) dwr.Range.Length);
		}, this.maxParallelism);
		if(loopResult.IsExceptional)
		{
			if (loopResult.Exceptions.Any())
			{
				Program.SyncOutput.ErrorUploadFailedWithExceptions(loopResult.Exceptions);
				//TODO: throw an AggregateException
				return false;
			}
		}
		else
		{
			// Success path: finalize blob metadata in one scope.
			using(var bdms = new BlobMetaDataScope(new CloudPageBlob(blob.Uri, blob.ServiceClient.Credentials)))
			{
				bdms.Current.SetBlobMd5Hash(md5Hash);
				bdms.Current.CleanUpUploadMetaData();
				bdms.Complete();
			}
		}
	}
	return true;
}
/// <summary>
/// Converts retention times from each provider onto the iRT scale defined by the
/// standard peptides, accumulating per-peptide averages and per-provider regression
/// data. Progress is reported once per provider.
/// </summary>
/// <returns>The aggregated averages and provider data, or null if the monitor was cancelled.</returns>
public static ProcessedIrtAverages ProcessRetentionTimes(IProgressMonitor monitor, IEnumerable<IRetentionTimeProvider> providers, int countProviders, DbIrtPeptide[] standardPeptideList, DbIrtPeptide[] items)
{
	IProgressStatus status = new ProgressStatus(Resources.LibraryGridViewDriver_ProcessRetentionTimes_Adding_retention_times);
	var dictProviderData = new List<KeyValuePair<string, RetentionTimeProviderData>>();
	var dictPeptideAverages = new Dictionary<Target, IrtPeptideAverages>();
	var runCount = 0;
	foreach (var retentionTimeProvider in providers)
	{
		if (monitor.IsCanceled)
		{
			return(null);
		}
		var message = string.Format(Resources.LibraryGridViewDriver_ProcessRetentionTimes_Converting_retention_times_from__0__, retentionTimeProvider.Name);
		monitor.UpdateProgress(status = status.ChangeMessage(message));
		runCount++;
		var data = new RetentionTimeProviderData(retentionTimeProvider, standardPeptideList.OrderBy(peptide => peptide.Irt));
		// Only fold this provider's times in when a usable regression exists or can be refined.
		if (data.RegressionSuccess || data.CalcRegressionWith(retentionTimeProvider, standardPeptideList, items))
		{
			// Trace.WriteLine(string.Format("slope = {0}, intercept = {1}", data.RegressionRefined.Slope, data.RegressionRefined.Intercept));
			AddRetentionTimesToDict(retentionTimeProvider, data.RegressionRefined, dictPeptideAverages, standardPeptideList);
		}
		dictProviderData.Add(new KeyValuePair<string, RetentionTimeProviderData>(retentionTimeProvider.Name, data));
		monitor.UpdateProgress(status = status.ChangePercentComplete(runCount * 100 / countProviders));
	}
	monitor.UpdateProgress(status.Complete());
	return(new ProcessedIrtAverages(dictPeptideAverages, dictProviderData));
}
/// <summary>
/// Plays the dialogue or scene transition appropriate for the player's current
/// story-progress status.
/// </summary>
public override void Interact()
{
	ProgressStatus status = PlayerScan.instance.progressStatus;

	switch (status)
	{
		case ProgressStatus.E_Start:
		case ProgressStatus.E_JungGotShocked:
			DialogueManager.instance.PlayDialogue("prologue_5");
			break;

		case ProgressStatus.E_ChangeClothes:
		case ProgressStatus.E_ChangeClothes2:
			DialogueManager.instance.PlayDialogue("prologue_6");
			break;

		case ProgressStatus.E_EatMedicine:
			GoToDreamMap();
			PlayerScan.instance.progressStatus = ProgressStatus.E_Sleep;
			break;

		case ProgressStatus.E_ErrandFinished:
			GoToDreamMap();
			break;

		case ProgressStatus.E_Chapter2Start:
			DialogueManager.instance.PlayDialogue("chapter_2_2"); // "Let's get to school before it's too late"
			break;

		case ProgressStatus.E_EatMedicine2:
			GoToDreamMap(() =>
			{
				DialogueManager.instance.PlayDialogue("chapter_2_8", false, () =>
				{
					// Namu: "How was your day?" ...
					ObjectManager.GetObject<MirrorRoomDoor>().isOpened = true;
					SoundManager.PlaySFX("door-open"); // door-opening sound effect
				});
			});
			break;
	}
}
/// <summary>
/// Rebuilds this group's combined mesh on a background task; the rebuild lock is
/// held until the UI-thread invalidate callback releases it.
/// </summary>
private void Rebuild(UndoBuffer undoBuffer)
{
	var rebuildLock = RebuildLock();
	ResetMeshWrapperMeshes(Object3DPropertyFlags.All, CancellationToken.None);
	// spin up a task to remove holes from the objects in the group
	ApplicationController.Instance.Tasks.Execute(
		"Combine".Localize(),
		(reporter, cancellationToken) =>
		{
			var progressStatus = new ProgressStatus();
			reporter.Report(progressStatus);
			// Only direct members of this group participate in the combine.
			var participants = this.Descendants().Where(o => o.OwnerID == this.ID).ToList();
			try
			{
				if (participants.Count() > 1)
				{
					Combine(participants, cancellationToken, reporter);
				}
			}
			catch
			{
				// NOTE(review): failures are silently swallowed so the lock below is still
				// released — consider at least logging the exception.
			}
			// Release the rebuild lock and refresh content on the UI thread.
			UiThread.RunOnIdle(() =>
			{
				rebuildLock.Dispose();
				base.Invalidate(new InvalidateArgs(this, InvalidateType.Content));
			});
			return(Task.CompletedTask);
		});
}
/// <summary>
/// Writes the report defined by <paramref name="viewInfo"/> to <paramref name="writer"/>,
/// updating <paramref name="status"/> through the progress monitor.
/// </summary>
/// <returns>false if the monitor was cancelled before completion; otherwise true.</returns>
public bool Export(IProgressMonitor progressMonitor, ref ProgressStatus status, ViewInfo viewInfo, TextWriter writer, DsvWriter dsvWriter)
{
	if (progressMonitor == null)
	{
		// Fall back to a monitor that can never be cancelled.
		progressMonitor = new UncancellableProgressMonitor();
	}

	using (var bindingListSource = new BindingListSource())
	{
		bindingListSource.SetViewContext(this, viewInfo);
		if (progressMonitor.IsCanceled)
		{
			return false;
		}

		status = status.ChangePercentComplete(5)
			.ChangeMessage(Resources.ExportReportDlg_ExportReport_Writing_report);
		progressMonitor.UpdateProgress(status);

		WriteDataWithStatus(progressMonitor, ref status, writer, bindingListSource, dsvWriter);
		if (progressMonitor.IsCanceled)
		{
			return false;
		}

		writer.Flush();
		status = status.Complete();
		progressMonitor.UpdateProgress(status);
	}

	return true;
}
/// <summary>
/// Reacts to printer communication-state changes: while printing or paused and a
/// heating phase is active, starts a read-only progress task that polls the bed or
/// hotend temperature until the target is reached (or the wait state changes);
/// otherwise clears any heating-wait state.
/// </summary>
private void Connection_CommunicationStateChanged(object s, EventArgs e)
{
	var printerConnection = this.Connection;
	if (printerConnection.Printing || printerConnection.Paused)
	{
		switch (printerConnection.DetailedPrintingState)
		{
			case DetailedPrintingState.HeatingBed:
				ApplicationController.Instance.Tasks.Execute(
					"Heating Bed".Localize(),
					this,
					(reporter, cancellationToken) =>
					{
						waitingForHeat = HeatingStatus.Bed;
						var progressStatus = new ProgressStatus();
						heatStart = printerConnection.ActualBedTemperature;
						heatDistance = Math.Abs(printerConnection.TargetBedTemperature - heatStart);
						// Poll until the bed reaches target or another state change ends the wait.
						while (heatDistance > 0 && waitingForHeat == HeatingStatus.Bed)
						{
							var remainingDistance = Math.Abs(printerConnection.TargetBedTemperature - printerConnection.ActualBedTemperature);
							progressStatus.Status = $"Heating Bed ({printerConnection.ActualBedTemperature:0}/{printerConnection.TargetBedTemperature:0})";
							progressStatus.Progress0To1 = (heatDistance - remainingDistance) / heatDistance;
							reporter.Report(progressStatus);
							Thread.Sleep(10);
						}
						return(Task.CompletedTask);
					},
					new RunningTaskOptions() { ReadOnlyReporting = true });
				break;
			case DetailedPrintingState.HeatingT0:
				ApplicationController.Instance.Tasks.Execute(
					"Heating Nozzle 1".Localize(),
					this,
					(reporter, cancellationToken) =>
					{
						waitingForHeat = HeatingStatus.T0;
						var progressStatus = new ProgressStatus();
						heatStart = printerConnection.GetActualHotendTemperature(0);
						heatDistance = Math.Abs(printerConnection.GetTargetHotendTemperature(0) - heatStart);
						// Poll until hotend 0 reaches target or another state change ends the wait.
						while (heatDistance > 0 && waitingForHeat == HeatingStatus.T0)
						{
							var currentDistance = Math.Abs(printerConnection.GetTargetHotendTemperature(0) - printerConnection.GetActualHotendTemperature(0));
							progressStatus.Progress0To1 = (heatDistance - currentDistance) / heatDistance;
							progressStatus.Status = $"Heating Nozzle ({printerConnection.GetActualHotendTemperature(0):0}/{printerConnection.GetTargetHotendTemperature(0):0})";
							reporter.Report(progressStatus);
							Thread.Sleep(1000);
						}
						return(Task.CompletedTask);
					},
					new RunningTaskOptions() { ReadOnlyReporting = true });
				break;
			case DetailedPrintingState.HeatingT1:
				ApplicationController.Instance.Tasks.Execute(
					"Heating Nozzle 2".Localize(),
					this,
					(reporter, cancellationToken) =>
					{
						waitingForHeat = HeatingStatus.T1;
						var progressStatus = new ProgressStatus();
						heatStart = printerConnection.GetActualHotendTemperature(1);
						heatDistance = Math.Abs(printerConnection.GetTargetHotendTemperature(1) - heatStart);
						// Poll until hotend 1 reaches target or another state change ends the wait.
						while (heatDistance > 0 && waitingForHeat == HeatingStatus.T1)
						{
							var currentDistance = Math.Abs(printerConnection.GetTargetHotendTemperature(1) - printerConnection.GetActualHotendTemperature(1));
							progressStatus.Progress0To1 = (heatDistance - currentDistance) / heatDistance;
							progressStatus.Status = $"Heating Nozzle ({printerConnection.GetActualHotendTemperature(1):0}/{printerConnection.GetTargetHotendTemperature(1):0})";
							reporter.Report(progressStatus);
							Thread.Sleep(1000);
						}
						return(Task.CompletedTask);
					},
					new RunningTaskOptions() { ReadOnlyReporting = true });
				break;
			case DetailedPrintingState.HomingAxis:
			case DetailedPrintingState.Printing:
			default:
				// clear any existing waiting states
				waitingForHeat = HeatingStatus.None;
				break;
		}
	}
	else
	{
		// turn off any running temp feedback tasks
		waitingForHeat = HeatingStatus.None;
	}
}
/// <summary>
/// Native entry point that marks the progress operation <paramref name="id"/> as finished.
/// </summary>
/// <param name="id">Identifier of the progress operation to complete.</param>
/// <param name="status">Final status; defaults to <see cref="ProgressStatus.Succeeded"/>.</param>
// NOTE(review): 'extern' requires a [DllImport] (or similar) attribute — presumably it sits
// on this declaration outside this excerpt; confirm.
public static extern void Finish(int id, ProgressStatus status = ProgressStatus.Succeeded);
/// <summary>
/// Performs a single lock-free progress update: starts from a fresh "Starting"
/// status and produces a replacement with percent-complete bumped by one.
/// </summary>
public void Update()
{
	var current = new ProgressStatus(0, "Starting");
	LockFreeUpdate(ref current, previous => new ProgressStatus(previous.PercentComplete + 1, previous.Message));
}
/// <summary>
/// Imports peak boundaries from delimited text and applies them to the current document.
/// Each row names a modified peptide, a results file and optionally a sample, plus
/// apex/start/end times (converted to minutes when <paramref name="isMinutes"/> is false).
/// Unrecognized peptides, files and charge states are collected into the corresponding
/// lists rather than treated as errors. When <paramref name="changePeaks"/> is false the
/// times are attached as annotations only; when <paramref name="removeMissing"/> is true,
/// peaks not present in the file are removed. Progress is reported against
/// <paramref name="lineCount"/>.
/// </summary>
/// <returns>The updated document, or the unchanged one on cancellation.</returns>
public SrmDocument Import(TextReader reader, IProgressMonitor progressMonitor, long lineCount, bool isMinutes, bool removeMissing = false, bool changePeaks = true)
{
	IProgressStatus status = new ProgressStatus(Resources.PeakBoundaryImporter_Import_Importing_Peak_Boundaries);
	double timeConversionFactor = isMinutes ? 1.0 : 60.0;
	int linesRead = 0;
	int progressPercent = 0;
	var docNew = (SrmDocument)Document.ChangeIgnoreChangingChildren(true);
	var docReference = docNew;
	var sequenceToNode = MakeSequenceDictionary(Document);
	var fileNameToFileMatch = new Dictionary<string, ChromSetFileMatch>();
	var trackAdjustedResults = new HashSet<ResultsKey>();
	var modMatcher = new ModificationMatcher();
	var canonicalSequenceDict = new Dictionary<string, string>();
	// Add annotations as possible columns
	var allFieldNames = new List<string[]>(FIELD_NAMES);
	allFieldNames.AddRange(from def in Document.Settings.DataSettings.AnnotationDefs
	                       where def.AnnotationTargets.Contains(AnnotationDef.AnnotationTarget.precursor_result)
	                       select new[] { def.Name });
	string line = reader.ReadLine();
	linesRead++;
	int[] fieldIndices;
	int fieldsTotal;
	// If we aren't changing peaks, allow start and end time to be missing
	var requiredFields = changePeaks ? REQUIRED_FIELDS : REQUIRED_NO_CHROM;
	char correctSeparator = ReadFirstLine(line, allFieldNames, requiredFields, out fieldIndices, out fieldsTotal);
	while ((line = reader.ReadLine()) != null)
	{
		linesRead++;
		if (progressMonitor != null)
		{
			if (progressMonitor.IsCanceled)
			{
				return(Document);
			}
			int progressNew = (int)(linesRead * 100 / lineCount);
			if (progressPercent != progressNew)
			{
				progressMonitor.UpdateProgress(status = status.ChangePercentComplete(progressNew));
				progressPercent = progressNew;
			}
		}
		var dataFields = new DataFields(fieldIndices, line.ParseDsvFields(correctSeparator), allFieldNames);
		if (dataFields.Length != fieldsTotal)
		{
			throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Line__0__field_count__1__differs_from_the_first_line__which_has__2_, linesRead, dataFields.Length, fieldsTotal));
		}
		string modifiedPeptideString = dataFields.GetField(Field.modified_peptide);
		string fileName = dataFields.GetField(Field.filename);
		bool isDecoy = dataFields.IsDecoy(linesRead);
		IList<IdentityPath> pepPaths;
		if (!sequenceToNode.TryGetValue(Tuple.Create(modifiedPeptideString, isDecoy), out pepPaths))
		{
			string canonicalSequence;
			// Cache canonical forms so each distinct peptide string runs the mod matcher once.
			if (!canonicalSequenceDict.TryGetValue(modifiedPeptideString, out canonicalSequence))
			{
				if (modifiedPeptideString.Any(c => c < 'A' || c > 'Z'))
				{
					modMatcher.CreateMatches(Document.Settings, new List<string> { modifiedPeptideString }, Settings.Default.StaticModList, Settings.Default.HeavyModList);
					var nodeForModPep = modMatcher.GetModifiedNode(modifiedPeptideString);
					if (nodeForModPep == null)
					{
						throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Peptide_has_unrecognized_modifications__0__at_line__1_, modifiedPeptideString, linesRead));
					}
					nodeForModPep = nodeForModPep.ChangeSettings(Document.Settings, SrmSettingsDiff.ALL);
					// Convert the modified peptide string into a standardized form that
					// converts unimod, names, etc, into masses, eg [+57.0]
					canonicalSequence = nodeForModPep.ModifiedTarget.Sequence;
					canonicalSequenceDict.Add(modifiedPeptideString, canonicalSequence);
				}
			}
			if (null != canonicalSequence)
			{
				sequenceToNode.TryGetValue(Tuple.Create(canonicalSequence, isDecoy), out pepPaths);
			}
		}
		if (null == pepPaths)
		{
			UnrecognizedPeptides.Add(modifiedPeptideString);
			continue;
		}
		Adduct charge;
		bool chargeSpecified = dataFields.TryGetCharge(linesRead, out charge);
		string sampleName = dataFields.GetField(Field.sample_name);
		double? apexTime = dataFields.GetTime(Field.apex_time, timeConversionFactor, Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_time_, linesRead);
		double? startTime = dataFields.GetTime(Field.start_time, timeConversionFactor, Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_start_time_, linesRead);
		double? endTime = dataFields.GetTime(Field.end_time, timeConversionFactor, Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_end_time_, linesRead);
		// Error if only one of startTime and endTime is null
		if (startTime == null && endTime != null)
		{
			if (changePeaks)
			{
				throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Missing_start_time_on_line__0_, linesRead));
			}
			endTime = null;
		}
		if (startTime != null && endTime == null)
		{
			if (changePeaks)
			{
				throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Missing_end_time_on_line__0_, linesRead));
			}
			startTime = null;
		}
		// Add filename to second dictionary if not yet encountered
		ChromSetFileMatch fileMatch;
		if (!fileNameToFileMatch.TryGetValue(fileName, out fileMatch))
		{
			fileMatch = Document.Settings.MeasuredResults.FindMatchingMSDataFile(MsDataFileUri.Parse(fileName));
			fileNameToFileMatch.Add(fileName, fileMatch);
		}
		if (fileMatch == null)
		{
			UnrecognizedFiles.Add(fileName);
			continue;
		}
		var chromSet = fileMatch.Chromatograms;
		string nameSet = chromSet.Name;
		ChromFileInfoId[] fileIds;
		if (sampleName == null)
		{
			fileIds = chromSet.MSDataFileInfos.Select(x => x.FileId).ToArray();
		}
		else
		{
			var sampleFile = chromSet.MSDataFileInfos.FirstOrDefault(info => Equals(sampleName, info.FilePath.GetSampleName()));
			if (sampleFile == null)
			{
				throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Sample__0__on_line__1__does_not_match_the_file__2__, sampleName, linesRead, fileName));
			}
			fileIds = new[] { sampleFile.FileId };
		}
		// Define the annotations to be added
		var annotations = dataFields.GetAnnotations();
		if (!changePeaks)
		{
			if (apexTime.HasValue)
			{
				annotations.Add(ComparePeakBoundaries.APEX_ANNOTATION, dataFields.GetField(Field.apex_time));
			}
			if (startTime.HasValue && endTime.HasValue)
			{
				annotations.Add(ComparePeakBoundaries.START_TIME_ANNOTATION, dataFields.GetField(Field.start_time));
				annotations.Add(ComparePeakBoundaries.END_TIME_ANNOTATION, dataFields.GetField(Field.end_time));
			}
		}
		AnnotationsAdded = annotations.Keys.ToList();
		// Loop over all the transition groups in that peptide to find matching charge,
		// or use all transition groups if charge not specified
		bool foundSample = false;
		foreach (var pepPath in pepPaths)
		{
			var nodePep = (PeptideDocNode)docNew.FindNode(pepPath);
			foreach (TransitionGroupDocNode groupNode in nodePep.Children)
			{
				if (chargeSpecified && charge != groupNode.TransitionGroup.PrecursorAdduct)
				{
					continue;
				}
				// Loop over the files in this groupNode to find the correct sample
				// Change peak boundaries for the transition group
				foreach (var fileId in GetApplicableFiles(fileIds, groupNode))
				{
					var groupPath = new IdentityPath(pepPath, groupNode.Id);
					// Attach annotations
					if (annotations.Any())
					{
						docNew = docNew.AddPrecursorResultsAnnotations(groupPath, fileId, annotations);
					}
					// Change peak
					var filePath = chromSet.GetFileInfo(fileId).FilePath;
					if (changePeaks)
					{
						docNew = docNew.ChangePeak(groupPath, nameSet, filePath, null, startTime, endTime, UserSet.IMPORTED, null, false);
					}
					// For removing peaks that are not in the file, if removeMissing = true
					trackAdjustedResults.Add(new ResultsKey(fileId.GlobalIndex, groupNode.Id));
					foundSample = true;
				}
			}
		}
		if (!foundSample)
		{
			UnrecognizedChargeStates.Add(new UnrecognizedChargeState(charge, fileName, modifiedPeptideString));
		}
	}
	// Remove peaks from the document that weren't in the file.
	if (removeMissing)
	{
		docNew = RemoveMissing(docNew, trackAdjustedResults, changePeaks);
	}
	// If nothing has changed, return the old Document before ChangeIgnoreChangingChildren was turned off
	if (!ReferenceEquals(docNew, docReference))
	{
		Document = (SrmDocument)Document.ChangeIgnoreChangingChildren(false).ChangeChildrenChecked(docNew.Children);
	}
	return(Document);
}
/// <summary>
/// Update the list of layers. Ignores filenames that exist as layers.
/// </summary>
/// <param name="newLayers">Collection that receives the loaded layers.</param>
/// <param name="progress">Progress status passed through to the layer loaders.</param>
/// <returns>true if all layer files loaded successfully; false otherwise.</returns>
public bool GatherLayers(LayerCollection newLayers, ProgressStatus progress)
{
	bool bOK = true;
	string directoryName = LayerDirectoryName;
	DirectoryInfo layerFolder = new DirectoryInfo(directoryName);
	if (!layerFolder.Exists)
		return bOK;

	FileInfo[] files = layerFolder.GetFiles("*" + IScene.LayerFileExtension);       // filter out *.Layer
	FileInfo[] filesXML = layerFolder.GetFiles("*" + IScene.LayerFileExtensionXML); // filter out *.LayerXML

	// add layer references: one relative path per line in the reference file
	string refFile = LayerReferenceFile;
	if (File.Exists(refFile))
	{
		List<FileInfo> allFiles = new List<FileInfo>(files);
		// FIX: dispose the reader deterministically — the original only called Close()
		// on the success path, leaking the file handle if ReadLine threw.
		using (TextReader txt = new StreamReader(refFile))
		{
			string line;
			while ((line = txt.ReadLine()) != null)
			{
				if (line.StartsWith("//") || !line.EndsWith(IScene.LayerFileExtension)) // commented out or not .Layer?
					continue;
				string externalname = this.Project.MakeAbsolute(line);
				if (File.Exists(externalname))
					allFiles.Add(new FileInfo(externalname));
			}
		}
		files = allFiles.ToArray();
	}

	FilterLayerFiles(files, null);
	FilterLayerFiles(filesXML, files);
	bOK = LoadLayers(newLayers, progress, files);
	bOK &= LoadLayersFromXML(newLayers, progress, filesXML);
	return bOK;
}
/// <summary>
/// Captures a snapshot of a sync-service progress event: the configuration and event
/// type it belongs to, the current status and percent, a message, and an optional
/// underlying exception.
/// </summary>
public SyncServiceProgressEventArgs(SyncConfiguration configuration, SyncEventType syncEventType, ProgressStatus status, int percent, string message, Exception innerException = null)
{
	Configuration = configuration;
	EventType = syncEventType;
	Status = status;
	Percent = percent;
	Message = message;
	InnerException = innerException;
}
/// <summary>
/// Performs a boolean operation (0 = union, 1 = subtract, 2 = intersect) between two
/// transformed meshes. Tries the native libigl-based implementation first (64-bit only,
/// and only when the external assembly is present); on any failure there it falls back
/// to the managed CSG implementation. Progress advances from
/// <paramref name="percentCompleted"/> by up to <paramref name="amountPerOperation"/>.
/// Returns null for an unrecognized operation code.
/// </summary>
public static Mesh Do(Mesh inMeshA, Matrix4X4 matrixA,
	// mesh B
	Mesh inMeshB, Matrix4X4 matrixB,
	// operation
	int operation,
	// reporting
	IProgress<ProgressStatus> reporter, double amountPerOperation, double percentCompleted, ProgressStatus progressStatus, CancellationToken cancellationToken)
{
	bool externalAssemblyExists = File.Exists(BooleanAssembly);
	// only try to run the improved booleans if we are 64 bit and it is there
	if (externalAssemblyExists && IntPtr.Size == 8)
	{
		IntPtr pVc = IntPtr.Zero;
		IntPtr pFc = IntPtr.Zero;
		try
		{
			// Flatten both meshes into the native call's array representation.
			double[] va = inMeshA.Vertices.ToDoubleArray(matrixA);
			int[] fa = inMeshA.Faces.ToIntArray();
			double[] vb = inMeshB.Vertices.ToDoubleArray(matrixB);
			int[] fb = inMeshB.Faces.ToIntArray();
			DoBooleanOperation(va, va.Length, fa, fa.Length,
				// object B
				vb, vb.Length, fb, fb.Length,
				// operation
				operation,
				// results
				out pVc, out int vcCount, out pFc, out int fcCount);
			var vcArray = new double[vcCount];
			Marshal.Copy(pVc, vcArray, 0, vcCount);
			var fcArray = new int[fcCount];
			Marshal.Copy(pFc, fcArray, 0, fcCount);
			return new Mesh(vcArray, fcArray);
		}
		catch (Exception ex)
		{
			// Log and fall through to the managed implementation below.
			ApplicationController.Instance.LogInfo("Error performing boolean operation: ");
			ApplicationController.Instance.LogInfo(ex.Message);
		}
		finally
		{
			// Free the native result buffers allocated by DoBooleanOperation.
			if (pVc != IntPtr.Zero)
			{
				DeleteDouble(ref pVc);
			}
			if (pFc != IntPtr.Zero)
			{
				DeleteInt(ref pFc);
			}
			progressStatus.Progress0To1 = percentCompleted + amountPerOperation;
			// FIX: null-conditional call — the union/subtract callbacks below already use
			// reporter?.Report, but this path dereferenced reporter unconditionally.
			reporter?.Report(progressStatus);
		}
	}
	else
	{
		Console.WriteLine($"libigl skipped - AssemblyExists: {externalAssemblyExists}; Is64Bit: {IntPtr.Size == 8};");
	}

	// Managed fallback: operate on transformed copies so the inputs are left untouched.
	var meshA = inMeshA.Copy(CancellationToken.None);
	meshA.Transform(matrixA);
	var meshB = inMeshB.Copy(CancellationToken.None);
	meshB.Transform(matrixB);

	switch (operation)
	{
		case 0:
			return PolygonMesh.Csg.CsgOperations.Union(meshA, meshB, (status, progress0To1) =>
			{
				// Abort if flagged
				cancellationToken.ThrowIfCancellationRequested();
				progressStatus.Status = status;
				progressStatus.Progress0To1 = percentCompleted + (amountPerOperation * progress0To1);
				reporter?.Report(progressStatus);
			}, cancellationToken);

		case 1:
			return PolygonMesh.Csg.CsgOperations.Subtract(meshA, meshB, (status, progress0To1) =>
			{
				// Abort if flagged
				cancellationToken.ThrowIfCancellationRequested();
				progressStatus.Status = status;
				progressStatus.Progress0To1 = percentCompleted + (amountPerOperation * progress0To1);
				reporter?.Report(progressStatus);
			}, cancellationToken);

		case 2:
			return PolygonMesh.Csg.CsgOperations.Intersect(meshA, meshB, (status, progress0To1) =>
			{
				// Abort if flagged
				cancellationToken.ThrowIfCancellationRequested();
				progressStatus.Status = status;
				progressStatus.Progress0To1 = percentCompleted + (amountPerOperation * progress0To1);
				// FIX: null-conditional for consistency with the other cases.
				reporter?.Report(progressStatus);
			}, cancellationToken);
	}

	return null;
}
/// <summary>
/// Loads layers from the given *.LayerXML files into <paramref name="newLayers"/>.
/// Each file may define several &lt;layer&gt; elements; a layer's shapes are
/// instantiated through the prefab parser. Returns false on the first file that
/// fails to parse; otherwise true.
/// </summary>
/// <param name="progress">NOTE(review): not consulted here even though fPercentage is
/// accumulated below — confirm whether per-file progress reporting was intended.</param>
public bool LoadLayersFromXML(LayerCollection newLayers, ProgressStatus progress, FileInfo[] files)
{
	bool bOK = true;
	float fPercentage = 0.0f;
	foreach (FileInfo fileInfo in files)
	{
		fPercentage += 100.0f / (float)files.Length;
		if (fileInfo == null || (fileInfo.Attributes & FileAttributes.Directory) != 0) // file info can be null
			continue;
		if (string.Compare(fileInfo.Extension, IScene.LayerFileExtensionXML, true) != 0)
			continue;
		try
		{
			string absfilename = fileInfo.FullName;
			string filename = fileInfo.Name;
			using (XmlTextReader xmlReader = new XmlTextReader(absfilename))
			{
				XmlDocument doc = new XmlDocument();
				doc.Load(xmlReader);
				if (doc.DocumentElement == null)
					throw new Exception("XML does not contain root node");
				IEnumerator nodes = doc.DocumentElement.GetEnumerator();
				while (nodes.MoveNext())
				{
					XmlElement node = nodes.Current as XmlElement;
					if (node == null || node.Name != "layer")
						continue;
					string classname = node.GetAttribute("class");
					string name = node.GetAttribute("name");
					string uid = node.GetAttribute("uid");
					if (string.IsNullOrEmpty(name))
						name = Path.GetFileNameWithoutExtension(filename);
					// Resolve the layer class; fall back to the base Layer type when unknown.
					Type t = EditorManager.ShapeFactory.GetTypeByName(classname, typeof(Layer), false);
					if (t == null)
						t = typeof(Layer);
					Layer layer = Activator.CreateInstance(t, new object[1] { name }) as Layer;
					if (layer == null)
						throw new Exception("Could not instantiate Layer");
					layer.SetLayerFileNameInternal(filename); // same filename but will replace extension
					if (!string.IsNullOrEmpty(uid))
					{
						layer.SetLayerIDInternal(Convert.ToUInt32(uid));
					}
					newLayers.Add(layer);
					// apply property/value pairs to layer
					SerializationHelper.ApplyXMLProperties(node, layer, false);
					// parse for shapes
					IEnumerator propNodes = node.GetEnumerator();
					while (propNodes.MoveNext())
					{
						XmlElement propNode = propNodes.Current as XmlElement;
						if (propNode == null)
							continue;
						if (propNode.Name == "shapes")
						{
							// use prefab functionality to parse it
							PrefabDesc dummyPrefab = new PrefabDesc(null);
							ShapeCollection shapes = dummyPrefab.CreateInstances(propNode, true, true);
							if (shapes != null)
							{
								// Re-parent the instantiated shapes under this layer's root.
								layer.Root.SetChildCollectionInternal(shapes);
								foreach (ShapeBase shape in shapes)
								{
									shape.SetParentInternal(layer.Root);
									shape.SetParentLayerInternal(layer);
								}
							}
							if (!string.IsNullOrEmpty(dummyPrefab.LastError))
							{
								string msg = "An error occurred while parsing file: \n\n" + filename + "\n\nThe layer won't contain any shapes.\nDetailed message:\n" + dummyPrefab.LastError;
								EditorManager.ShowMessageBox(msg, "Error parsing layer file", MessageBoxButtons.OK, MessageBoxIcon.Error);
							}
							continue;
						}
					}
				}
			}
		}
		catch (Exception ex)
		{
			// NOTE(review): returning false here bypasses bOK and skips all remaining
			// files after the first failure — confirm that is intended.
			EditorManager.DumpException(ex, false);
			return false;
		}
	}
	return bOK;
}
// ReSharper restore UnusedMember.Local

/// <summary>
/// Reads the legacy BiblioSpec binary library file into memory, building the
/// in-memory spectrum dictionary and sequence set. Detects big- vs little-endian
/// files and the Linux header layout, and reports progress/cancellation through
/// <paramref name="loader"/>.
/// </summary>
/// <param name="loader">Progress monitor; also supplies cancellation.</param>
/// <returns>true when the library loaded completely; false on cancellation or error.</returns>
private bool Load(ILoadMonitor loader)
{
    ProgressStatus status = new ProgressStatus(string.Format(Resources.BiblioSpecLibrary_Load_Loading__0__library, Path.GetFileName(FilePath)));
    loader.UpdateProgress(status);

    // Running byte offset into the file; stored per-spectrum as the location
    // of its peak data for later on-demand reads.
    long lenRead = 0;
    // AdlerChecksum checksum = new AdlerChecksum();

    try
    {
        // Use a buffered stream for initial read
        BufferedStream stream = new BufferedStream(CreateStream(loader), 32 * 1024);

        int countHeader = (int)LibHeaders.count * 4;
        byte[] libHeader = new byte[countHeader];
        if (stream.Read(libHeader, 0, countHeader) != countHeader)
            throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_library_header_File_may_be_corrupted);
        lenRead += countHeader;
        // Check the first byte of the primary version number to determine
        // whether the format is little- or big-endian. Little-endian will
        // have the version number in this byte, while big-endian will have zero.
        if (libHeader[(int)LibHeaders.version1 * 4] == 0)
            _bigEndian = true;

        int numSpectra = GetInt32(libHeader, (int)LibHeaders.num_spectra);
        var dictLibrary = new Dictionary<LibKey, BiblioSpectrumInfo>(numSpectra);
        var setSequences = new HashSet<LibSeqKey>();

        string revStr = string.Format("{0}.{1}", // Not L10N
            GetInt32(libHeader, (int)LibHeaders.version1),
            GetInt32(libHeader, (int)LibHeaders.version2));
        Revision = float.Parse(revStr, CultureInfo.InvariantCulture);

        // checksum.MakeForBuff(libHeader, AdlerChecksum.ADLER_START);

        countHeader = (int)SpectrumHeaders.count * 4;
        // Oversized scratch buffers reused for every spectrum.
        byte[] specHeader = new byte[1024];
        byte[] specSequence = new byte[1024];
        for (int i = 0; i < numSpectra; i++)
        {
            int percent = i * 100 / numSpectra;
            if (status.PercentComplete != percent)
            {
                // Check for cancellation after each integer change in percent loaded.
                if (loader.IsCanceled)
                {
                    loader.UpdateProgress(status.Cancel());
                    return(false);
                }
                // If not cancelled, update progress.
                loader.UpdateProgress(status = status.ChangePercentComplete(percent));
            }

            // Read spectrum header
            int bytesRead = stream.Read(specHeader, 0, countHeader);
            if (bytesRead != countHeader)
                throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_spectrum_header_File_may_be_corrupted);

            // If this is the first header, and the sequence length is zero,
            // then this is a Linux format library. Switch to linux format,
            // and start over.
            if (i == 0 && GetInt32(specHeader, (int)SpectrumHeaders.seq_len) == 0)
            {
                _linuxFormat = true;
                stream.Seek(lenRead, SeekOrigin.Begin);
                // Re-read spectrum header with the Linux header size.
                countHeader = (int)SpectrumHeadersLinux.count * 4;
                bytesRead = stream.Read(specHeader, 0, countHeader);
                if (bytesRead != countHeader)
                    throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_spectrum_header_File_may_be_corrupted);
            }
            lenRead += bytesRead;
            // checksum.MakeForBuff(specHeader, checksum.ChecksumValue);

            int charge = GetInt32(specHeader, (int)SpectrumHeaders.charge);
            if (charge > TransitionGroup.MAX_PRECURSOR_CHARGE)
                throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Invalid_precursor_charge_found_File_may_be_corrupted);

            int numPeaks = GetInt32(specHeader, (int)SpectrumHeaders.num_peaks);
            // seq_len and copies live at different offsets in the Linux layout.
            int seqLength = GetInt32(specHeader, (_linuxFormat ? (int)SpectrumHeadersLinux.seq_len : (int)SpectrumHeaders.seq_len));
            int copies = GetInt32(specHeader, (_linuxFormat ? (int)SpectrumHeadersLinux.copies : (int)SpectrumHeaders.copies));

            // Read sequence information (two strings of seqLength + 1 bytes each).
            int countSeq = (seqLength + 1) * 2;
            if (stream.Read(specSequence, 0, countSeq) != countSeq)
                throw new InvalidDataException(Resources.BiblioSpecLibrary_Load_Data_truncation_in_spectrum_sequence_File_may_be_corrupted);
            lenRead += countSeq;
            // checksum.MakeForBuff(specSequence, checksum.ChecksumValue);

            // Store in dictionary
            if (IsUnmodified(specSequence, seqLength + 1, seqLength))
            {
                // These libraries should not have duplicates, but just in case.
                // CONSIDER: Emit error about redundancy?
                // These legacy libraries assume [+57.0] modified Cysteine
                LibKey key = new LibKey(GetCModified(specSequence, ref seqLength), 0, seqLength, charge);
                if (!dictLibrary.ContainsKey(key))
                    // lenRead at this point is the file offset of this spectrum's peaks.
                    dictLibrary.Add(key, new BiblioSpectrumInfo((short)copies, (short)numPeaks, lenRead));
                setSequences.Add(new LibSeqKey(key));
            }

            // Read over peaks (two float arrays: m/z and intensity).
            int countPeaks = 2 * sizeof(Single) * numPeaks;
            stream.Seek(countPeaks, SeekOrigin.Current); // Skip spectrum
            lenRead += countPeaks;
            // checksum.MakeForBuff(specPeaks, checksum.ChecksumValue);
        }

        // Checksum = checksum.ChecksumValue;
        _dictLibrary = dictLibrary;
        _setSequences = setSequences;
        loader.UpdateProgress(status.Complete());
        return(true);
    }
    catch (InvalidDataException x)
    {
        loader.UpdateProgress(status.ChangeErrorException(x));
        return(false);
    }
    catch (IOException x)
    {
        loader.UpdateProgress(status.ChangeErrorException(x));
        return(false);
    }
    catch (Exception x)
    {
        // Wrap unexpected failures with the library path for context.
        x = new Exception(string.Format(Resources.BiblioSpecLibrary_Load_Failed_loading_library__0__, FilePath), x);
        loader.UpdateProgress(status.ChangeErrorException(x));
        return(false);
    }
    finally
    {
        if (ReadStream != null)
        {
            // Close the read stream to ensure we never leak it.
            // This only costs one extra open, the first time the
            // active document tries to read.
            try { ReadStream.CloseStream(); }
            catch (IOException) {}
        }
    }
}
private void wc_DownloadProgressChanged(object sender, DownloadProgressChangedEventArgs e) { ProgressMonitor.UpdateProgress(ProgressStatus = ProgressStatus.ChangePercentComplete(e.ProgressPercentage)); }
/// <summary>
/// Downloads the page blob identified by blobUri into a local VHD file, reading the
/// blob's populated ranges in parallel and writing each into the pre-sized file.
/// </summary>
/// <param name="destination">Local path for the downloaded VHD; a stale temp file is deleted first.</param>
public void Download(string destination)
{
    DeleteTempVhdIfExist(destination);
    Console.WriteLine("\t\tDownloading blob '{0}' ...", blobUri.BlobName);
    Console.WriteLine("\t\tImage download start time: '{0}'", DateTime.UtcNow.ToString("o"));
    var blobHandle = new BlobHandle(blobUri, this.StorageAccountKey);
    const int megaByte = 1024 * 1024;
    var ranges = blobHandle.GetUploadableRanges();
    var bufferManager = BufferManager.CreateBufferManager(Int32.MaxValue, 20 * megaByte);
    var downloadStatus = new ProgressStatus(0, ranges.Sum(r => r.Length), new ComputeStats());
    Trace.WriteLine(String.Format("Total Data:{0}", ranges.Sum(r => r.Length)));
    const int maxParallelism = 24;
    // NOTE(review): progress is reported through the Upload* callbacks even though this
    // is a download - confirm whether dedicated download callbacks exist in SyncOutput.
    using (new ServicePointHandler(this.blobUri.Uri, maxParallelism))
    using (new ProgressTracker(downloadStatus, Program.SyncOutput.ProgressUploadStatus, Program.SyncOutput.ProgressUploadComplete, TimeSpan.FromSeconds(1)))
    {
        // if(SparseFile.VolumeSupportsSparseFiles(destination))
        // {
        //     using(var fileStream = SparseFile.Create(destination))
        //     {
        //         foreach (var emptyRange in blobHandle.GetEmptyRanges())
        //         {
        //             SparseFile.SetSparseRange(fileStream.SafeFileHandle, emptyRange.StartIndex, emptyRange.Length);
        //         }
        //     }
        // }
        using (var fileStream = new FileStream(destination, FileMode.OpenOrCreate, FileAccess.Write, FileShare.Write, 8 * megaByte, FileOptions.WriteThrough))
        {
            fileStream.SetLength(0);
            fileStream.SetLength(blobHandle.Length);
            // FIX: synchronize writers on a dedicated gate instead of locking the
            // FileStream instance itself (consistent with the other Download
            // implementations in this codebase, which use a fileStreamLock object).
            var fileStreamLock = new object();
            LoopResult lr = Parallel.ForEach<IndexRange, Stream>(ranges, blobHandle.OpenStream, (r, b) =>
            {
                b.Seek(r.StartIndex, SeekOrigin.Begin);
                byte[] buffer = this.EnsureReadAsSize(b, (int)r.Length, bufferManager);
                lock (fileStreamLock)
                {
                    Trace.WriteLine(String.Format("Range:{0}", r));
                    fileStream.Seek(r.StartIndex, SeekOrigin.Begin);
                    fileStream.Write(buffer, 0, (int)r.Length);
                    fileStream.Flush();
                }
                downloadStatus.AddToProcessedBytes((int)r.Length);
            },
            pbwlf =>
            {
                pbwlf.Dispose();
            },
            maxParallelism);
            if (lr.IsExceptional)
            {
                Console.WriteLine("\t\tException(s) happened");
                for (int i = 0; i < lr.Exceptions.Count; i++)
                {
                    Console.WriteLine("{0} -> {1}", i, lr.Exceptions[i]);
                }
            }
        }
    }
    Console.WriteLine("\t\tImage download end time : '{0}'", DateTime.UtcNow.ToString("o"));
}
public void Process(MGFile file, ProgressStatus reporter) { LazyInitialization(); //prompt user for series name lookups?? int? seriesID = GetSeriesID(file); if (seriesID == null) return; TvdbSeries series = GetSeries(seriesID.Value); file.Tags.Set(TVShowDataStoreTemplate.SeriesTitle, series.SeriesName); file.Tags.Set(TVShowDataStoreTemplate.SeriesDescription, series.Overview); file.Tags.Set(TVShowDataStoreTemplate.SeriesBanner, LoadBannerToPath(series.SeriesBanners.FirstOrDefault())); file.Tags.Set(TVShowDataStoreTemplate.SeriesPoster, LoadBannerToPath(series.PosterBanners.FirstOrDefault())); TvdbEpisode exactEpisode = GetEpisode(series, file); if (exactEpisode != null) { file.Tags.Set(TVShowDataStoreTemplate.EpisodeTitle, exactEpisode.EpisodeName); file.Tags.Set(TVShowDataStoreTemplate.EpisodeDescription, exactEpisode.Overview); file.Tags.Set(TVShowDataStoreTemplate.EpisodeID, exactEpisode.Id); file.Tags.Set(TVShowDataStoreTemplate.EpisodeFirstAired, exactEpisode.FirstAired); file.Tags.Set(TVShowDataStoreTemplate.EpisodeBanner, LoadBannerToPath(exactEpisode.Banner)); } }
private void on_ProgressChanged(object sender, ProgressStatus.ProgressChangedArgs e) { progressBar.Value = (int)e.Progress.Percentage; if (_currentScene != null) statusString.Text = string.Format("{0} ({1}/{2}): {3}", Path.GetFileName(_currentScene), _scenesProcessed + 1, _scenesFound, e.Progress.StatusString); else statusString.Text = e.Progress.StatusString; Refresh(); TestManager.Helpers.ProcessEvents(); }
/// <summary> /// Call when process is aborted /// </summary> public void Abort() { status = ProgressStatus.Aborted; }
/// <summary> /// Call when process is done /// </summary> public void Done() { status = ProgressStatus.Done; }
public WebDownloadClient(IProgressMonitor progressMonitor, ProgressStatus progressStatus) { ProgressMonitor = progressMonitor; ProgressStatus = progressStatus; }
private void Transcode(MGFile file, ProgressStatus progress, Preset preset) { log.InfoFormat("Encoding using preset {0}...", preset.Name); var p = new Process(); p.StartInfo = new ProcessStartInfo(GetEncoderPath(preset.Encoder)); var outputPath = new Uri(file.FileName + preset.Extension); p.StartInfo.Arguments = BuildCommandLine(preset, file, outputPath); log.InfoFormat("{0} {1}", p.StartInfo.FileName, p.StartInfo.Arguments); p.StartInfo.CreateNoWindow = true; p.StartInfo.UseShellExecute = false; p.StartInfo.RedirectStandardError = true; p.StartInfo.RedirectStandardOutput = true; int totalFrames = file.Tags.GetInt(TVShowDataStoreTemplate.FrameCount).Value; var statusParser = new EncoderStatusParser(preset.Encoder, totalFrames, progress); p.OutputDataReceived += statusParser.OutputHandler; p.ErrorDataReceived += statusParser.OutputHandler; log.Debug("Starting..."); p.Start(); p.BeginErrorReadLine(); p.BeginOutputReadLine(); p.WaitForExit(); log.Debug("Done."); if (p.ExitCode != 0) log.ErrorFormat("Encoding failed with error code {0}.", p.ExitCode); else if (!File.Exists(outputPath.LocalPath)) log.ErrorFormat("Encoding failed- output file doesn't exist: \"{0}\".", outputPath.LocalPath); else { MGFile newFile = m_DataStore.AddNewFile(outputPath); Mp4TagWriterPlugin.WriteMp4TvShowTags(newFile); } }
void progress_ProgressChanged(object sender, ProgressStatus.ProgressChangedArgs e) { progressBar.Value = (int)progress.Percentage; }
public void Execute(MGFile file, ProgressStatus progress) { m_Plugin.Transcode(file, progress, m_Preset); }
/// <summary>
/// Loads binary-serialized layer files into the given collection, optionally restoring
/// interim backup files selected by the user. Already-loaded, unmodified layers are
/// skipped; per-file progress is reported through <paramref name="progress"/>.
/// </summary>
/// <param name="newLayers">Collection that receives every loaded layer.</param>
/// <param name="progress">Optional progress object (may be null).</param>
/// <param name="files">Candidate files; entries may be null or directories.</param>
/// <returns>true (individual file failures are reported via dialog and skipped).</returns>
public override bool LoadLayers(LayerCollection newLayers, ProgressStatus progress, FileInfo[] files)
{
  bool bOK = true;
  float fPercentage = 0.0f;
  string layerDir = LayerDirectoryName;
  Dictionary<FileInfo, bool> layersToLoad = new Dictionary<FileInfo, bool>();
  foreach (FileInfo file in files)
  {
    // Key is FileInfo, Value bool says if a layer backup should be loaded instead of the original layer file.
    // The layer may have been filtered if it belongs to a zone that isn't currently loaded.
    if (file != null)
    {
      layersToLoad.Add(file, false);
    }
  }

  // Check if there's any layers with backup files and mark their value with true.
  _layersBackupRestoreSelection.Clear();
  if (CheckLayersInterimBackup(layersToLoad, layerDir))
  {
    // Open layer lock dialog where user can select the layers to restore from the backup file.
    LayerRestoreDlg dlg = new LayerRestoreDlg();
    dlg.RestoreLayerList = layersToLoad;
    dlg.ShowDialog();
    // If the user chooses the layers to restore, the layers to restore will be locked, all others won't.
    _useLayersBackupRestore = true;
  }
  else
  {
    _useLayersBackupRestore = false;
  }

  foreach (var fileInfoEntry in layersToLoad)
  {
    FileInfo fileInfo = fileInfoEntry.Key;
    fPercentage += 100.0f / (float)files.Length;
    if (fileInfo == null || (fileInfo.Attributes & FileAttributes.Directory) != 0) // file info can be null
      continue;
    if (string.Compare(fileInfo.Extension, IScene.LayerFileExtension, true) != 0)
      continue;
    string layerFile = fileInfo.Name;
    if (!fileInfo.FullName.StartsWith(LayerDirectoryName)) // assume it is a layer reference
      layerFile = this.Project.MakeRelative(fileInfo.FullName);
    Layer layer = Layers.GetLayerByFilename(layerFile);
    Layer.LayerFileStatus_e newState = Layer.LayerFileStatus_e.NewLayer;
    if (layer != null) // already there
    {
      bool bModified = layer.LastModified != fileInfo.LastWriteTime;
      System.Diagnostics.Debug.Assert(!layer.OwnsLock || !bModified);
      if (bModified && !layer.OwnsLock)
      {
        newState = Layer.LayerFileStatus_e.Modified;
      }
      else
      {
        // don't add the non-modified layer to the list
        layer.FileStatus = Layer.LayerFileStatus_e.NotModified;
        continue;
      }
    }

    // If the layer is loaded from the backup, it will load the content from the backup file
    // and lock the layer right away
    bool useBackupRestore = fileInfoEntry.Value == true;
    IFormatter fmt = SerializationHelper.AUTO_FORMATTER;
    try
    {
      string layerToLoad = fileInfo.FullName;
      // If the layer was marked to load the backup file, add this here
      if (useBackupRestore)
      {
        layerToLoad += IScene.InterimBackupFileExtension;
      }
      // open the layer in read-only mode
      using (FileStream fs = new FileStream(layerToLoad, FileMode.Open, FileAccess.Read))
      {
        layer = (Layer)fmt.Deserialize(fs);
      }
      // make sure there is only one layer of type V3DLayer [#18824]
      if (layer is V3DLayer)
      {
        foreach (Layer other in newLayers)
          if (other is V3DLayer)
            throw new Exception("The Layer directory contains more than one Layer of type 'Main Layer'. E.g. '" + layer.LayerFilename + "' and '" + other.LayerFilename + "'.\n\nIgnoring '" + layer.LayerFilename + "'");
      }
    }
    catch (Exception ex)
    {
      EditorManager.DumpException(ex);
      EditorManager.ShowMessageBox("An exception occurred while loading layer '" + fileInfo.Name + "'\n\nDetailed Message:\n" + ex.Message, "Layer loading error", MessageBoxButtons.OK, MessageBoxIcon.Error);
      continue;
    }
    if (fileInfo.FullName.StartsWith(layerDir))
      layer.SetLayerFileNameInternal(fileInfo.Name);
    else
    {
      // this layer is a reference
      string name = Project.MakeRelative(fileInfo.FullName);
      layer.SetLayerFileNameInternal(name);
      layer.IsReference = true;
    }
    layer.UpdateLastModified(fileInfo);
    layer.FileStatus = newState;
    layer.UpdateReadOnlyState(fileInfo);
    newLayers.Add(layer);
    if (progress != null)
      progress.Percentage = fPercentage;
    // If we restore a backup, we will always try to lock the layer and bypass a potential user choice for locking layers
    if (useBackupRestore)
    {
      _layersBackupRestoreSelection.Add(layer);
    }
  }
  return bOK;
}
public EncoderStatusParser(string encoderName, int totalFrames, ProgressStatus progress) { m_EncoderName = encoderName; m_TotalFrames = totalFrames; m_StartTime = DateTime.Now; m_Progress = progress; }
/// <summary> /// Overidden function; additionally reloads the custom lighting file /// </summary> public override void OnCreateAllEngineInstances(ProgressStatus progress) { string meshAbsFilename = Path.Combine(this.LayerDirectoryName, "StaticLightingInfo.lit"); EditorManager.EngineManager.InitStaticLightingMode(meshAbsFilename); // load global lightgrid first EditorManager.EngineManager.ReloadLightingFiles(FileName, null); // removes extension for us... // scale down range (20%..90%) if (progress != null) progress.SetRange(progress.Percentage, 90.0f); base.OnCreateAllEngineInstances(progress); if (progress != null) { progress.SetRange(0.0f, 100.0f); // full range again progress.StatusString = "Reload lighting files"; } // reload lighting file EditorManager.EngineManager.ReloadStaticLightingMeshes(null, meshAbsFilename); string basename = FileNameNoExt + "_"; // also for every zone in the scene foreach (Zone zone in Zones) { if (!zone.Loaded) continue; meshAbsFilename = Path.Combine(this.LayerDirectoryName, "StaticLightingInfo_" + zone.ZoneName + ".lit"); EditorManager.EngineManager.ReloadStaticLightingMeshes(zone, meshAbsFilename); EditorManager.EngineManager.ReloadLightingFiles(basename + zone.ZoneName, zone); } // trigger the event EditorManager.TriggerSceneEvent(SceneEventArgs.Action.StaticLightingLoaded, true); if (progress != null) { progress.StatusString = "Finalizing..."; progress.Percentage = 98.0f; } }
/// <summary>
/// Downloads the blob described by the parameters into a local file, reading its
/// populated ranges in parallel and writing them into a pre-sized file. Honors the
/// OverWrite flag and optionally validates free disk space first.
/// </summary>
/// <exception cref="ArgumentException">The destination exists and OverWrite is not set.</exception>
/// <exception cref="AggregateException">One or more range downloads failed.</exception>
public void Download()
{
    if (parameters.OverWrite)
    {
        DeleteTempVhdIfExist(parameters.LocalFilePath);
    }
    else
    {
        if (File.Exists(parameters.LocalFilePath))
        {
            var message = String.Format("File already exists, you can use Overwrite option to delete it:'{0}'", parameters.LocalFilePath);
            throw new ArgumentException(message);
        }
    }
    var blobHandle = new BlobHandle(parameters.BlobUri, this.parameters.StorageAccountKey);
    if (parameters.ValidateFreeDiskSpace)
    {
        TryValidateFreeDiskSpace(parameters.LocalFilePath, blobHandle.Length);
    }
    const int megaByte = 1024 * 1024;
    var ranges = blobHandle.GetUploadableRanges();
    var bufferManager = BufferManager.CreateBufferManager(Int32.MaxValue, 20 * megaByte);
    var downloadStatus = new ProgressStatus(0, ranges.Sum(r => r.Length), new ComputeStats());
    Trace.WriteLine(String.Format("Total Data:{0}", ranges.Sum(r => r.Length)));
    Program.SyncOutput.WriteVerboseWithTimestamp("Downloading the blob: {0}", parameters.BlobUri.BlobName);
    // Dedicated gate so parallel range writers serialize access to the file stream.
    var fileStreamLock = new object();
    using (new ServicePointHandler(parameters.BlobUri.Uri, parameters.ConnectionLimit))
    {
        using (new ProgressTracker(downloadStatus, parameters.ProgressDownloadStatus, parameters.ProgressDownloadComplete, TimeSpan.FromSeconds(1)))
        {
            using (var fileStream = new FileStream(parameters.LocalFilePath, FileMode.OpenOrCreate, FileAccess.Write, FileShare.Write, 8 * megaByte, FileOptions.WriteThrough))
            {
                fileStream.SetLength(0);
                fileStream.SetLength(blobHandle.Length);
                LoopResult lr = Parallel.ForEach<IndexRange, Stream>(ranges, blobHandle.OpenStream, (r, b) =>
                {
                    b.Seek(r.StartIndex, SeekOrigin.Begin);
                    byte[] buffer = this.EnsureReadAsSize(b, (int)r.Length, bufferManager);
                    lock (fileStreamLock)
                    {
                        Trace.WriteLine(String.Format("Range:{0}", r));
                        fileStream.Seek(r.StartIndex, SeekOrigin.Begin);
                        fileStream.Write(buffer, 0, (int)r.Length);
                        fileStream.Flush();
                    }
                    downloadStatus.AddToProcessedBytes((int)r.Length);
                },
                pbwlf =>
                {
                    pbwlf.Dispose();
                },
                parameters.ConnectionLimit);
                if (lr.IsExceptional)
                {
                    throw new AggregateException(lr.Exceptions);
                }
            }
        }
    }
    // FIX: corrected "successfullty" typo in the verbose log message.
    Program.SyncOutput.WriteVerboseWithTimestamp("Blob downloaded successfully: {0}", parameters.BlobUri.BlobName);
}
public ProgressData(ProgressStatus type, string filePathOpt) { Status = type; FilePathOpt = filePathOpt; }
/// <summary>
/// Rebuilds this curve operation: clamps the editable properties to valid ranges,
/// then (on a background task) bends every visible source mesh around a cylinder of
/// the configured Diameter, optionally splitting the mesh first so the bend stays smooth.
/// </summary>
/// <returns>The background rebuild task.</returns>
public override Task Rebuild()
{
    this.DebugDepth("Rebuild");
    bool valuesChanged = false;
    // ensure we have good values
    StartPercent = agg_basics.Clamp(StartPercent, 0, 100, ref valuesChanged);
    if (Diameter < 1 || Diameter > 100000)
    {
        if (Diameter == double.MaxValue)
        {
            var aabb = this.GetAxisAlignedBoundingBox();
            // uninitialized set to a reasonable value
            Diameter = (int)aabb.XSize;
        }
        Diameter = Math.Min(100000, Math.Max(1, Diameter));
        valuesChanged = true;
    }
    MinSidesPerRotation = agg_basics.Clamp(MinSidesPerRotation, 3, 360, ref valuesChanged);
    // Hold rebuild locks until the background task completes (released in RunOnIdle below).
    var rebuildLocks = this.RebuilLockAll();
    return(ApplicationController.Instance.Tasks.Execute(
        "Curve".Localize(),
        null,
        (reporter, cancellationToken) =>
    {
        var sourceAabb = this.SourceContainer.GetAxisAlignedBoundingBox();
        var radius = Diameter / 2;
        var circumference = MathHelper.Tau * radius;
        // How many full wraps the source's X extent makes around the cylinder,
        // and from that the number/size of split planes along X.
        double numRotations = sourceAabb.XSize / circumference;
        double numberOfCuts = numRotations * MinSidesPerRotation;
        double cutSize = sourceAabb.XSize / numberOfCuts;
        double cutPosition = sourceAabb.MinXYZ.X + cutSize;
        var cuts = new List<double>();
        for (int i = 0; i < numberOfCuts; i++)
        {
            cuts.Add(cutPosition);
            cutPosition += cutSize;
        }
        // Bend pivot: X chosen by StartPercent, Y offset by the bend radius
        // above (CCW) or below (CW) the source bounds.
        var rotationCenter = new Vector3(sourceAabb.MinXYZ.X + (sourceAabb.MaxXYZ.X - sourceAabb.MinXYZ.X) * (StartPercent / 100),
            BendCcw ? sourceAabb.MaxXYZ.Y + radius : sourceAabb.MinXYZ.Y - radius,
            sourceAabb.Center.Z);
        var curvedChildren = new List<IObject3D>();
        var status = new ProgressStatus();
        foreach (var sourceItem in SourceContainer.VisibleMeshes())
        {
            var originalMesh = sourceItem.Mesh;
            status.Status = "Copy Mesh".Localize();
            reporter.Report(status);
            var transformedMesh = originalMesh.Copy(CancellationToken.None);
            var itemMatrix = sourceItem.WorldMatrix(SourceContainer);
            // transform into this space
            transformedMesh.Transform(itemMatrix);
            if (SplitMesh)
            {
                status.Status = "Split Mesh".Localize();
                reporter.Report(status);
                // split the mesh along the x axis
                transformedMesh.SplitOnPlanes(Vector3.UnitX, cuts, cutSize / 8);
            }
            // Map each vertex's X position to an angle around the cylinder and
            // its Y distance from the pivot to a radius.
            for (int i = 0; i < transformedMesh.Vertices.Count; i++)
            {
                var position = transformedMesh.Vertices[i];
                var angleToRotate = ((position.X - rotationCenter.X) / circumference) * MathHelper.Tau - MathHelper.Tau / 4;
                var distanceFromCenter = rotationCenter.Y - position.Y;
                if (!BendCcw)
                {
                    // rotate in the opposite direction for clockwise bends
                    angleToRotate = -angleToRotate;
                    distanceFromCenter = -distanceFromCenter;
                }
                var rotatePosition = new Vector3Float(Math.Cos(angleToRotate), Math.Sin(angleToRotate), 0) * distanceFromCenter;
                rotatePosition.Z = position.Z;
                transformedMesh.Vertices[i] = rotatePosition + new Vector3Float(rotationCenter.X, radius + sourceAabb.MaxXYZ.Y, 0);
            }
            // transform back into item local space
            transformedMesh.Transform(Matrix4X4.CreateTranslation(-rotationCenter) * itemMatrix.Inverted);
            if (SplitMesh)
            {
                status.Status = "Merge Vertices".Localize();
                reporter.Report(status);
                // re-weld the seams created by splitting
                transformedMesh.MergeVertices(.1);
            }
            transformedMesh.CalculateNormals();
            var curvedChild = new Object3D()
            {
                Mesh = transformedMesh
            };
            curvedChild.CopyWorldProperties(sourceItem, SourceContainer, Object3DPropertyFlags.All, false);
            curvedChild.Visible = true;
            curvedChild.Translate(new Vector3(rotationCenter));
            if (!BendCcw)
            {
                curvedChild.Translate(0, -sourceAabb.YSize - Diameter, 0);
            }
            curvedChildren.Add(curvedChild);
        }
        RemoveAllButSource();
        this.SourceContainer.Visible = false;
        this.Children.Modify((list) =>
        {
            list.AddRange(curvedChildren);
        });
        // Release locks and notify on the UI thread once the rebuild is committed.
        UiThread.RunOnIdle(() =>
        {
            rebuildLocks.Dispose();
            if (valuesChanged)
            {
                Invalidate(InvalidateType.DisplayValues);
            }
            Parent?.Invalidate(new InvalidateArgs(this, InvalidateType.Children));
        });
        return Task.CompletedTask;
    }));
}
/// <summary>
/// Downloads the blob described by the parameters into a local file, running the
/// parallel range copy on a worker task while this thread drives the progress
/// tracker once per second until completion.
/// </summary>
/// <exception cref="ArgumentException">The destination exists and OverWrite is not set.</exception>
/// <exception cref="AggregateException">One or more range downloads failed.</exception>
public void Download()
{
    if (parameters.OverWrite)
    {
        DeleteTempVhdIfExist(parameters.LocalFilePath);
    }
    else
    {
        if (File.Exists(parameters.LocalFilePath))
        {
            var message = String.Format("File already exists, you can use Overwrite option to delete it:'{0}'", parameters.LocalFilePath);
            throw new ArgumentException(message);
        }
    }
    var blobHandle = new BlobHandle(parameters.BlobUri, this.parameters.StorageAccountKey);
    if (parameters.ValidateFreeDiskSpace)
    {
        TryValidateFreeDiskSpace(parameters.LocalFilePath, blobHandle.Length);
    }
    const int megaByte = 1024 * 1024;
    var ranges = blobHandle.GetUploadableRanges();
    var bufferManager = BufferManager.CreateBufferManager(Int32.MaxValue, 20 * megaByte);
    var downloadStatus = new ProgressStatus(0, ranges.Sum(r => r.Length), new ComputeStats());
    Trace.WriteLine(String.Format("Total Data:{0}", ranges.Sum(r => r.Length)));
    Program.SyncOutput.WriteVerboseWithTimestamp("Downloading the blob: {0}", parameters.BlobUri.BlobName);
    // Dedicated gate so parallel range writers serialize access to the file stream.
    var fileStreamLock = new object();
    using (new ServicePointHandler(parameters.BlobUri.Uri, parameters.ConnectionLimit))
    {
        using (ProgressTracker progressTracker = new ProgressTracker(downloadStatus, parameters.ProgressDownloadStatus, parameters.ProgressDownloadComplete))
        {
            using (var fileStream = new FileStream(parameters.LocalFilePath, FileMode.OpenOrCreate, FileAccess.Write, FileShare.Write, 8 * megaByte, FileOptions.WriteThrough))
            {
                fileStream.SetLength(0);
                fileStream.SetLength(blobHandle.Length);
                Task<LoopResult> task = Task<LoopResult>.Factory.StartNew(() =>
                {
                    return(Threading.Parallel.ForEach<IndexRange, Stream>(ranges, blobHandle.OpenStream, (r, b) =>
                    {
                        b.Seek(r.StartIndex, SeekOrigin.Begin);
                        byte[] buffer = this.EnsureReadAsSize(b, (int)r.Length, bufferManager);
                        lock (fileStreamLock)
                        {
                            Trace.WriteLine(String.Format("Range:{0}", r));
                            fileStream.Seek(r.StartIndex, SeekOrigin.Begin);
                            fileStream.Write(buffer, 0, (int)r.Length);
                            fileStream.Flush();
                        }
                        downloadStatus.AddToProcessedBytes((int)r.Length);
                    },
                    pbwlf =>
                    {
                        pbwlf.Dispose();
                    },
                    parameters.ConnectionLimit));
                });
                // This tracker variant has no timer - pump it manually while waiting.
                while (!task.Wait(TimeSpan.FromSeconds(1)))
                {
                    progressTracker.Update();
                }
                LoopResult lr = task.Result;
                if (lr.IsExceptional)
                {
                    throw new AggregateException(lr.Exceptions);
                }
            }
        }
    }
    // FIX: corrected "successfullty" typo in the verbose log message.
    Program.SyncOutput.WriteVerboseWithTimestamp("Blob downloaded successfully: {0}", parameters.BlobUri.BlobName);
}
/// <summary> /// Train the model by iterative calculating weights to separate target and decoy transition groups. /// </summary> /// <param name="targets">Target transition groups.</param> /// <param name="decoys">Decoy transition groups.</param> /// <param name="initParameters">Initial model parameters (weights and bias)</param> /// <param name="includeSecondBest"> Include the second best peaks in the targets as decoys?</param> /// <param name="preTrain">Use a pre-trained model to bootstrap the learning.</param> /// <param name="progressMonitor"></param> /// <returns>Immutable model with new weights.</returns> public override IPeakScoringModel Train(IList <IList <float[]> > targets, IList <IList <float[]> > decoys, LinearModelParams initParameters, bool includeSecondBest = false, bool preTrain = true, IProgressMonitor progressMonitor = null) { if (initParameters == null) { initParameters = new LinearModelParams(_peakFeatureCalculators.Count); } return(ChangeProp(ImClone(this), im => { targets = targets.Where(list => list.Count > 0).ToList(); decoys = decoys.Where(list => list.Count > 0).ToList(); var targetTransitionGroups = new ScoredGroupPeaksSet(targets); var decoyTransitionGroups = new ScoredGroupPeaksSet(decoys); // Bootstrap from the pre-trained legacy model if (preTrain) { var preTrainedWeights = new double[initParameters.Weights.Count]; for (int i = 0; i < preTrainedWeights.Length; ++i) { if (double.IsNaN(initParameters.Weights[i])) { preTrainedWeights[i] = double.NaN; } } int standardEnabledCount = GetEnabledCount(LegacyScoringModel.StandardFeatureCalculators, initParameters.Weights); int analyteEnabledCount = GetEnabledCount(LegacyScoringModel.AnalyteFeatureCalculators, initParameters.Weights); bool hasStandards = standardEnabledCount >= analyteEnabledCount; var calculators = hasStandards ? 
LegacyScoringModel.StandardFeatureCalculators : LegacyScoringModel.AnalyteFeatureCalculators; for (int i = 0; i < calculators.Length; ++i) { if (calculators[i].GetType() == typeof(MQuestRetentionTimePredictionCalc)) { continue; } SetCalculatorValue(calculators[i].GetType(), LegacyScoringModel.DEFAULT_WEIGHTS[i], preTrainedWeights); } targetTransitionGroups.ScorePeaks(preTrainedWeights); decoyTransitionGroups.ScorePeaks(preTrainedWeights); } // Iteratively refine the weights through multiple iterations. var calcWeights = new double[initParameters.Weights.Count]; Array.Copy(initParameters.Weights.ToArray(), calcWeights, initParameters.Weights.Count); double decoyMean = 0; double decoyStdev = 0; bool colinearWarning = false; // This may take a long time between progress updates, but just measure progress by cycles through the training IProgressStatus status = new ProgressStatus(Resources.MProphetPeakScoringModel_Train_Training_peak_scoring_model); if (progressMonitor != null) { progressMonitor.UpdateProgress(status); } for (int iteration = 0; iteration < MAX_ITERATIONS; iteration++) { if (progressMonitor != null) { if (progressMonitor.IsCanceled) { throw new OperationCanceledException(); } progressMonitor.UpdateProgress(status = status.ChangeMessage(string.Format(Resources.MProphetPeakScoringModel_Train_Training_peak_scoring_model__iteration__0__of__1__, iteration + 1, MAX_ITERATIONS)) .ChangePercentComplete((iteration + 1) * 100 / (MAX_ITERATIONS + 1))); } im.CalculateWeights(iteration, targetTransitionGroups, decoyTransitionGroups, includeSecondBest, calcWeights, out decoyMean, out decoyStdev, ref colinearWarning); GC.Collect(); // Each loop generates a number of large objects. 
GC helps to keep private bytes under control } if (progressMonitor != null) { progressMonitor.UpdateProgress(status.ChangePercentComplete(100)); } var parameters = new LinearModelParams(calcWeights); parameters = parameters.RescaleParameters(decoyMean, decoyStdev); im.Parameters = parameters; im.ColinearWarning = colinearWarning; im.UsesSecondBest = includeSecondBest; im.UsesDecoys = decoys.Count > 0; })); }
public void SetProgressBar(ProgressStatus performanceProgressBar) { this.progressBar = performanceProgressBar; }
// ReSharper restore NonLocalizedString
/// <summary>
/// Executes an export for all chromatograms in the document
/// with file names matching one of the files in filesToExport
/// writer = location to write the chromatogram data to
/// longWaitBroker = progress bar (can be null)
/// filesToExport = file names for which to write chromatograms
/// cultureInfo = local culture
/// chromExtractors = list of special chromatogram types to include (base peak, etc)
/// chromSources = type of ions to include (precursor, product)
/// </summary>
public void Export(TextWriter writer, IProgressMonitor longWaitBroker, IList<string> filesToExport, CultureInfo cultureInfo, IList<ChromExtractor> chromExtractors, IList<ChromSource> chromSources)
{
    int currentReplicates = 0;
    int totalReplicates = _chromatogramSets.Count;
    IProgressStatus status = new ProgressStatus(string.Empty);
    FormatHeader(writer, FIELD_NAMES);
    foreach (var chromatograms in _chromatogramSets)
    {
        if (longWaitBroker != null)
        {
            // Per-replicate progress; capped below 100 until fully done.
            int percentComplete = currentReplicates++ * 100 / totalReplicates;
            if (percentComplete < 100)
            {
                longWaitBroker.UpdateProgress(status = status.ChangeMessage(string.Format(Resources.ChromatogramExporter_Export_Exporting_Chromatograms_for__0_, chromatograms.Name)).ChangePercentComplete(percentComplete));
            }
        }
        // First export the special whole-run chromatograms (TIC, base peak, ...).
        foreach (var extractor in chromExtractors)
        {
            ChromatogramGroupInfo[] arrayChromSpecial;
            if (!_measuredResults.TryLoadAllIonsChromatogram(chromatograms, extractor, true, out arrayChromSpecial))
            {
                // TODO: need error determination here
                continue;
            }
            foreach (var chromInfo in arrayChromSpecial)
            {
                string fileName = chromInfo.FilePath.GetFileName();
                // Skip the files that have not been selected for export
                if (!filesToExport.Contains(fileName))
                {
                    continue;
                }
                var firstChromatogram = chromInfo.TransitionPointSets.First();
                IList<float> times = firstChromatogram.Times;
                IList<float> intensities = firstChromatogram.Intensities;
                float tic = CalculateTic(times, intensities);
                string extractorName = GetExtractorName(extractor);
                // Molecule/ion columns are N/A for whole-run chromatograms.
                string[] fieldArray =
                {
                    fileName,
                    TextUtil.EXCEL_NA,
                    TextUtil.EXCEL_NA,
                    TextUtil.EXCEL_NA,
                    extractorName,
                    TextUtil.EXCEL_NA,
                    TextUtil.EXCEL_NA,
                    System.Convert.ToString(tic, cultureInfo)
                };
                FormatChromLine(writer, fieldArray, times, intensities, cultureInfo);
            }
        }
        // Then export per-molecule chromatograms, checking cancellation as we go.
        var molecules = Document.Molecules.ToArray();
        for (int iMolecule = 0; iMolecule < molecules.Length; iMolecule++)
        {
            if (longWaitBroker != null)
            {
                if (longWaitBroker.IsCanceled)
                {
                    return;
                }
                longWaitBroker.UpdateProgress(status = status.ChangePercentComplete(iMolecule * 100 / molecules.Length));
            }
            var peptideNode = molecules[iMolecule];
            foreach (TransitionGroupDocNode groupNode in peptideNode.Children)
            {
                if (longWaitBroker != null && longWaitBroker.IsCanceled)
                {
                    return;
                }
                ExportGroupNode(peptideNode, groupNode, chromatograms, filesToExport, chromSources, writer, cultureInfo);
            }
        }
    }
}
/// <summary>
/// Generates a printable heightmap mesh from a resized image: each pixel becomes
/// a column whose top Z is interpolated from the pixel value, with a flat base
/// and closed side walls.
/// </summary>
/// <param name="resizedImage">Source image; Pixels is read as 4 bytes per pixel (only the first byte of each pixel is used).</param>
/// <param name="maxZ">Maximum height of the model in mm (including the base).</param>
/// <param name="nozzleWidth">Used as the base thickness in mm.</param>
/// <param name="pixelsPerMM">Currently unused in this implementation.</param>
/// <param name="invert">When true, dark pixels become tall instead of short.</param>
/// <param name="reporter">Progress sink; assumed non-null — TODO confirm callers.</param>
/// <returns>The generated closed mesh.</returns>
public static Mesh Generate(IImageData resizedImage, double maxZ, double nozzleWidth, double pixelsPerMM, bool invert, IProgress<ProgressStatus> reporter)
{
    // TODO: Move this to a user supplied value
    double baseThickness = nozzleWidth; // base thickness (in mm)
    double zRange = maxZ - baseThickness;

    // Dimensions of image
    var width = resizedImage.Width;
    var height = resizedImage.Height;
    var zScale = zRange / 255;
    var pixelData = resizedImage.Pixels;

    Stopwatch stopwatch = Stopwatch.StartNew();

    var mesh = new Mesh();

    //var rescale = (double)onPlateWidth / imageData.Width;
    var rescale = 1;
    var progressStatus = new ProgressStatus();

    // Build an array of PixelInfo objects from each pixel
    // Collapse from 4 bytes per pixel to one - makes subsequent processing more logical and has minimal cost
    var pixels = pixelData.Where((x, i) => i % 4 == 0)
        // Interpolate the pixel color to zheight
        .Select(b => baseThickness + (invert ? 255 - b : b) * zScale)
        // Project to Vector3 for each pixel at the computed x/y/z
        // (x = column, y = negated row so the image is not mirrored, z = height)
        .Select((z, i) => new Vector3(
            i % width * rescale,
            (i - i % width) / width * rescale * -1,
            z))
        // Project to PixelInfo, creating a mirrored Vector3 at z0, paired together and added to the mesh
        .Select(vec =>
        {
            var pixelInfo = new PixelInfo()
            {
                Top = vec,
                Bottom = new Vector3(vec.X, vec.Y, 0)
            };
            mesh.Vertices.Add(pixelInfo.Top);
            mesh.Vertices.Add(pixelInfo.Bottom);
            return (pixelInfo);
        }).ToArray();

    Console.WriteLine("ElapsedTime - PixelInfo Linq Generation: {0}", stopwatch.ElapsedMilliseconds);
    stopwatch.Restart();

    // Select pixels along image edges
    var backRow = pixels.Take(width).Reverse().ToArray();
    var frontRow = pixels.Skip((height - 1) * width).Take(width).ToArray();
    var leftRow = pixels.Where((x, i) => i % width == 0).ToArray();
    var rightRow = pixels.Where((x, i) => (i + 1) % width == 0).Reverse().ToArray();

    int k, nextJ, nextK;

    // Progress is reported every notificationInterval faces out of workCount total.
    var notificationInterval = 100;
    var workCount = (resizedImage.Width - 1) * (resizedImage.Height - 1) + (height - 1) + (width - 1);
    double workIndex = 0;

    // Vertical faces: process each row and column, creating the top and bottom faces as appropriate
    for (int i = 0; i < resizedImage.Height - 1; ++i)
    {
        var startAt = i * width;

        // Process each column
        for (int j = startAt; j < startAt + resizedImage.Width - 1; ++j)
        {
            // j/k and nextJ/nextK are the four corners of one grid cell.
            k = j + 1;
            nextJ = j + resizedImage.Width;
            nextK = nextJ + 1;

            // Create north, then south face
            mesh.CreateFace(new[] { pixels[k].Top, pixels[j].Top, pixels[nextJ].Top, pixels[nextK].Top });
            mesh.CreateFace(new[] { pixels[j].Bottom, pixels[k].Bottom, pixels[nextK].Bottom, pixels[nextJ].Bottom });

            workIndex++;
            if (workIndex % notificationInterval == 0)
            {
                progressStatus.Progress0To1 = workIndex / workCount;
                reporter.Report(progressStatus);
            }
        }
    }

    // Side faces: East/West
    for (int j = 0; j < height - 1; ++j)
    {
        //Next row
        k = j + 1;

        // Create east, then west face
        mesh.CreateFace(new[] { leftRow[k].Top, leftRow[j].Top, leftRow[j].Bottom, leftRow[k].Bottom });
        mesh.CreateFace(new[] { rightRow[k].Top, rightRow[j].Top, rightRow[j].Bottom, rightRow[k].Bottom });

        workIndex++;
        if (workIndex % notificationInterval == 0)
        {
            progressStatus.Progress0To1 = workIndex / workCount;
            reporter.Report(progressStatus);
        }
    }

    // Side faces: North/South
    for (int j = 0; j < width - 1; ++j)
    {
        // Next row
        k = j + 1;

        // Create north, then south face
        mesh.CreateFace(new[] { frontRow[k].Top, frontRow[j].Top, frontRow[j].Bottom, frontRow[k].Bottom });
        mesh.CreateFace(new[] { backRow[k].Top, backRow[j].Top, backRow[j].Bottom, backRow[k].Bottom });

        workIndex++;
        if (workIndex % notificationInterval == 0)
        {
            progressStatus.Progress0To1 = workIndex / workCount;
            reporter.Report(progressStatus);
        }
    }

    Console.WriteLine("ElapsedTime - Face Generation: {0}", stopwatch.ElapsedMilliseconds);

    return (mesh);
}
public static IrtDb GetIrtDb(string path, IProgressMonitor loadMonitor, out IList <DbIrtPeptide> dbPeptides) { var status = new ProgressStatus(string.Format(Resources.IrtDb_GetIrtDb_Loading_iRT_database__0_, path)); if (loadMonitor != null) { loadMonitor.UpdateProgress(status); } try { if (path == null) { throw new DatabaseOpeningException(Resources.IrtDb_GetIrtDb_Database_path_cannot_be_null); } if (!File.Exists(path)) { throw new DatabaseOpeningException(String.Format(Resources.IrtDb_GetIrtDb_The_file__0__does_not_exist_, path)); } string message; Exception xInner = null; try { //Check for a valid SQLite file and that it has our schema //Allow only one thread at a time to read from the same path using (var sessionFactory = GetSessionFactory(path)) { lock (sessionFactory) { return(new IrtDb(path, sessionFactory).Load(loadMonitor, status, out dbPeptides)); } } } catch (UnauthorizedAccessException x) { message = string.Format(Resources.IrtDb_GetIrtDb_You_do_not_have_privileges_to_access_the_file__0_, path); xInner = x; } catch (DirectoryNotFoundException x) { message = string.Format(Resources.IrtDb_GetIrtDb_The_path_containing__0__does_not_exist, path); xInner = x; } catch (FileNotFoundException x) { message = string.Format(Resources.IrtDb_GetIrtDb_The_file__0__could_not_be_created_Perhaps_you_do_not_have_sufficient_privileges, path); xInner = x; } catch (SQLiteException x) { message = string.Format(Resources.IrtDb_GetIrtDb_The_file__0__is_not_a_valid_iRT_database_file, path); xInner = x; } catch (Exception x) { message = string.Format(Resources.IrtDb_GetIrtDb_The_file__0__could_not_be_opened, path); xInner = x; } throw new DatabaseOpeningException(message, xInner); } catch (DatabaseOpeningException x) { if (loadMonitor == null) { throw; } loadMonitor.UpdateProgress(status.ChangeErrorException(x)); dbPeptides = new DbIrtPeptide[0]; return(null); } }
/// <summary>
/// Renders every item in <c>itemSource</c> to an image, packs the images into a
/// multi-page PDF part sheet, saves it to <c>pathAndFileToSaveTo</c>, and asks
/// the OS to open the result. Progress is reported through the task reporter.
/// </summary>
/// <returns>The background task performing the export.</returns>
public Task SaveSheets()
{
    return (ApplicationController.Instance.Tasks.Execute(
        "Export Part Sheet".Localize(),
        null,
        async (reporter, cancelationToken) =>
        {
            var progressStatus = new ProgressStatus();
            var processCount = 0.0;
            currentlySaving = true;

            // first create images for all the parts
            foreach (var item in itemSource)
            {
                progressStatus.Status = item.Name;
                reporter.Report(progressStatus);

                var object3D = await item.CreateContent();
                var loadedMeshGroups = object3D.VisibleMeshes().ToList();
                if (loadedMeshGroups?.Count > 0)
                {
                    // Combined world-space bounds of every mesh in the item.
                    AxisAlignedBoundingBox aabb = loadedMeshGroups[0].Mesh.GetAxisAlignedBoundingBox(loadedMeshGroups[0].WorldMatrix());
                    for (int i = 1; i < loadedMeshGroups.Count; i++)
                    {
                        aabb = AxisAlignedBoundingBox.Union(aabb, loadedMeshGroups[i].Mesh.GetAxisAlignedBoundingBox(loadedMeshGroups[i].WorldMatrix()));
                    }

                    RectangleDouble bounds2D = new RectangleDouble(aabb.MinXYZ.X, aabb.MinXYZ.Y, aabb.MaxXYZ.X, aabb.MaxXYZ.Y);
                    double widthInMM = bounds2D.Width + PartMarginMM * 2;
                    double textSpaceMM = 5;
                    double heightMM = textSpaceMM + bounds2D.Height + PartMarginMM * 2;

                    TypeFacePrinter typeFacePrinter = new TypeFacePrinter(item.Name, 28, Vector2.Zero, Justification.Center, Baseline.BoundsCenter);
                    double sizeOfNameX = typeFacePrinter.GetSize().X + PartMarginPixels * 2;
                    Vector2 sizeOfRender = new Vector2(widthInMM * PixelPerMM, heightMM * PixelPerMM);

                    // Image must be wide enough for the part or its name, whichever is larger.
                    ImageBuffer imageOfPart = new ImageBuffer((int)(Math.Max(sizeOfNameX, sizeOfRender.X)), (int)(sizeOfRender.Y));
                    typeFacePrinter.Origin = new Vector2(imageOfPart.Width / 2, (textSpaceMM / 2) * PixelPerMM);

                    Graphics2D partGraphics2D = imageOfPart.NewGraphics2D();

                    // Rounded, outlined background behind the part render.
                    RectangleDouble rectBounds = new RectangleDouble(0, 0, imageOfPart.Width, imageOfPart.Height);
                    double strokeWidth = .5 * PixelPerMM;
                    rectBounds.Inflate(-strokeWidth / 2);
                    RoundedRect rect = new RoundedRect(rectBounds, PartMarginMM * PixelPerMM);
                    partGraphics2D.Render(rect, Color.LightGray);
                    Stroke rectOutline = new Stroke(rect, strokeWidth);
                    partGraphics2D.Render(rectOutline, Color.DarkGray);

                    // Top-down orthographic projection of each mesh into the image.
                    foreach (var meshGroup in loadedMeshGroups)
                    {
                        PolygonMesh.Rendering.OrthographicZProjection.DrawTo(partGraphics2D, meshGroup.Mesh, meshGroup.WorldMatrix(), new Vector2(-bounds2D.Left + PartMarginMM, -bounds2D.Bottom + textSpaceMM + PartMarginMM), PixelPerMM, Color.Black);
                    }

                    partGraphics2D.Render(typeFacePrinter, Color.Black);
                    partImagesToPrint.Add(new PartImage(imageOfPart));
                }

                // Cap at 95% until the PDF itself has been written out below.
                progressStatus.Progress0To1 = Math.Min(processCount / itemSource.Count, .95);
                reporter.Report(progressStatus);
                processCount++;
            }

            progressStatus.Status = "Saving".Localize();
            reporter.Report(progressStatus);

            // Pack largest images first so pages fill efficiently.
            partImagesToPrint.Sort(BiggestToLittlestImages);

            PdfDocument document = new PdfDocument();
            document.Info.Title = "MatterHackers Parts Sheet";
            document.Info.Author = "MatterHackers Inc.";
            document.Info.Subject = "This is a list of the parts that are in a queue from MatterControl.";
            document.Info.Keywords = "MatterControl, STL, 3D Printing";

            int nextPartToPrintIndex = 0;
            int plateNumber = 1;

            // Add pages until every part image has been placed.
            while (nextPartToPrintIndex < partImagesToPrint.Count)
            {
                PdfPage pdfPage = document.AddPage();
                CreateOnePage(plateNumber++, ref nextPartToPrintIndex, pdfPage);
            }

            try
            {
                // save the final document
                document.Save(pathAndFileToSaveTo);

                // Now try and open the document. This will launch whatever PDF viewer is on the system and ask it
                // to show the file (at least on Windows).
                Process.Start(pathAndFileToSaveTo);
            }
            catch (Exception)
            {
                // Best effort: failure to save or to launch a viewer must not crash the export task.
            }

            currentlySaving = false;
            progressStatus.Progress0To1 = 1;
            reporter.Report(progressStatus);
        }));
}
public UpdateProgressResponse UpdateProgress(ProgressStatus status) { _updateProgressImpl.Invoke(status); return(UpdateProgressResponse.normal); }
/// <summary>
/// Rebuilds this twist operation: clamps property values into their supported
/// ranges, slices the source meshes along Z, then rotates each vertex about the
/// twist center by an amount proportional to its height (optionally shaped by an
/// easing curve), producing twisted result children.
/// </summary>
/// <returns>The background task performing the rebuild.</returns>
public override Task Rebuild()
{
    this.DebugDepth("Rebuild");

    bool valuesChanged = false;

    // Clamp user-editable properties into valid ranges; remember whether anything
    // changed so the display can be refreshed at the end of the rebuild.
    if (Angle < 1 || Angle > 100000)
    {
        Angle = Math.Min(100000, Math.Max(1, Angle));
        valuesChanged = true;
    }

    if (RotationDistance < 0 || RotationDistance > 100000)
    {
        RotationDistance = Math.Min(100000, Math.Max(0, RotationDistance));
        valuesChanged = true;
    }

    if (RotationSlices < 3 || RotationSlices > 300)
    {
        RotationSlices = Math.Min(300, Math.Max(3, RotationSlices));
        valuesChanged = true;
    }

    if (EndHeightPercent < 1 || EndHeightPercent > 100)
    {
        EndHeightPercent = Math.Min(100, Math.Max(1, EndHeightPercent));
        valuesChanged = true;
    }

    if (StartHeightPercent < 0 || StartHeightPercent > EndHeightPercent - 1)
    {
        StartHeightPercent = Math.Min(EndHeightPercent - 1, Math.Max(0, StartHeightPercent));
        valuesChanged = true;
    }

    if (OverrideRadius < .01)
    {
        // Default the radius to the larger footprint dimension of this object.
        OverrideRadius = Math.Max(this.GetAxisAlignedBoundingBox().XSize, this.GetAxisAlignedBoundingBox().YSize);
        valuesChanged = true;
    }

    var rebuildLocks = this.RebuilLockAll();

    return (ApplicationController.Instance.Tasks.Execute(
        "Twist".Localize(),
        null,
        (reporter, cancellationToken) =>
        {
            var sourceAabb = this.SourceContainer.GetAxisAlignedBoundingBox();

            // Z extent of the region that is twisted.
            var bottom = sourceAabb.MinXYZ.Z;
            var top = sourceAabb.ZSize * EndHeightPercent / 100.0;
            var size = sourceAabb.ZSize;
            if (Advanced)
            {
                bottom += sourceAabb.ZSize * StartHeightPercent / 100.0;
                size = top - bottom;
            }

            // Compute the Z positions of the cutting planes used to subdivide the mesh.
            double numberOfCuts = RotationSlices;
            double cutSize = size / numberOfCuts;
            var cuts = new List<double>();
            for (int i = 0; i < numberOfCuts + 1; i++)
            {
                var ratio = i / numberOfCuts;
                if (Advanced)
                {
                    // Bisection: find the parameter whose eased value equals this
                    // ratio, so cut planes follow the selected easing curve.
                    var goal = ratio;
                    var current = .5;
                    var next = .25;
                    // look for an x value that equals the goal
                    for (int j = 0; j < 64; j++)
                    {
                        var xAtY = Easing.Specify(EasingType, EasingOption, current);
                        if (xAtY < goal)
                        {
                            current += next;
                        }
                        else if (xAtY > goal)
                        {
                            current -= next;
                        }

                        next *= .5;
                    }

                    ratio = current;
                }

                cuts.Add(bottom - cutSize + (size * ratio));
            }

            // get the rotation from the center of the circumscribed circle of the convex hull
            var enclosingCircle = SourceContainer.GetSmallestEnclosingCircleAlongZ();
            var rotationCenter = enclosingCircle.Center + RotationOffset;

            var twistedChildren = new List<IObject3D>();

            var status = new ProgressStatus();

            foreach (var sourceItem in SourceContainer.VisibleMeshes())
            {
                var originalMesh = sourceItem.Mesh;
                status.Status = "Copy Mesh".Localize();
                reporter.Report(status);

                var transformedMesh = originalMesh.Copy(CancellationToken.None);
                var itemMatrix = sourceItem.WorldMatrix(SourceContainer);

                // transform into this space
                transformedMesh.Transform(itemMatrix);

                status.Status = "Split Mesh".Localize();
                reporter.Report(status);

                // split the mesh along the z axis
                transformedMesh.SplitOnPlanes(Vector3.UnitZ, cuts, cutSize / 8);

                for (int i = 0; i < transformedMesh.Vertices.Count; i++)
                {
                    var position = transformedMesh.Vertices[i];

                    // 0 at the bottom of the twisted region, 1 at the top.
                    var ratio = (position.Z - bottom) / size;

                    if (Advanced)
                    {
                        if (position.Z < bottom)
                        {
                            ratio = 0;
                        }
                        else if (position.Z > top)
                        {
                            ratio = 1;
                        }
                        else
                        {
                            ratio = (position.Z - bottom) / size;
                            ratio = Easing.Specify(EasingType, EasingOption, ratio);
                        }
                    }

                    var angleToRotate = ratio * Angle / 360.0 * MathHelper.Tau;

                    if (RotationType == RotationTypes.Distance)
                    {
                        IRadiusProvider radiusProvider = RadiusProvider;

                        // start off with assuming we want to set the radius
                        var radius = this.OverrideRadius;
                        if (radiusProvider != null && !this.EditRadius)
                        {
                            // have a radius provider and not wanting to edit
                            radius = radiusProvider.Radius;
                        }
                        else if (!this.EditRadius)
                        {
                            // not wanting to edit
                            radius = enclosingCircle.Radius;
                        }

                        if (this.PreferedRadius != radius)
                        {
                            // Persist the derived radius and refresh the UI off-thread.
                            this.PreferedRadius = radius;
                            this.OverrideRadius = radius;
                            UiThread.RunOnIdle(() => Invalidate(InvalidateType.DisplayValues));
                        }

                        angleToRotate = ratio * (RotationDistance / radius);
                    }

                    if (!TwistCw)
                    {
                        angleToRotate = -angleToRotate;
                    }

                    // Rotate the vertex's XY position about the twist center.
                    var positionXy = new Vector2(position) - rotationCenter;
                    positionXy.Rotate(angleToRotate);
                    positionXy += rotationCenter;
                    transformedMesh.Vertices[i] = new Vector3Float(positionXy.X, positionXy.Y, position.Z);
                }

                // transform back into item local space
                transformedMesh.Transform(itemMatrix.Inverted);

                //transformedMesh.MergeVertices(.1);
                transformedMesh.CalculateNormals();

                var twistedChild = new Object3D()
                {
                    Mesh = transformedMesh
                };
                twistedChild.CopyWorldProperties(sourceItem, SourceContainer, Object3DPropertyFlags.All, false);
                twistedChild.Visible = true;
                twistedChildren.Add(twistedChild);
            }

            // Swap the visible children for the freshly twisted results.
            RemoveAllButSource();
            this.SourceContainer.Visible = false;

            this.Children.Modify((list) =>
            {
                list.AddRange(twistedChildren);
            });

            rebuildLocks.Dispose();

            if (valuesChanged)
            {
                Invalidate(InvalidateType.DisplayValues);
            }

            Parent?.Invalidate(new InvalidateArgs(this, InvalidateType.Children));

            return Task.CompletedTask;
        }));
}
public ProgressItem(ProgressStatus status, string message) { this.Status = status; this.Message = message; }
public Progress(ProgressStatus status) { Status = status; }
/// <summary>
/// Scans the supplied files for layer files (and layer references outside the
/// layer directory) and deserializes any that are new or modified into
/// <paramref name="newLayers"/>.
/// </summary>
/// <param name="newLayers">Collection that receives the loaded layers.</param>
/// <param name="progress">Optional progress sink; Percentage is updated per processed file (may be null).</param>
/// <param name="files">Candidate files to inspect; entries may be null.</param>
/// <returns>Always true in the current implementation.</returns>
public override bool LoadLayers(LayerCollection newLayers, ProgressStatus progress, FileInfo[] files)
{
    bool bOK = true;
    float fPercentage = 0.0f;
    string layerDir = LayerDirectoryName;

    foreach (FileInfo fileInfo in files)
    {
        fPercentage += 100.0f / (float)files.Length;

        if (fileInfo == null || (fileInfo.Attributes & FileAttributes.Directory) != 0) // file info can be null
            continue;
        if (string.Compare(fileInfo.Extension, IScene.LayerFileExtension, true) != 0)
            continue;

        string layerFile = fileInfo.Name;
        if (!fileInfo.FullName.StartsWith(LayerDirectoryName)) // assume it is a layer reference
            layerFile = this.Project.MakeRelative(fileInfo.FullName);

        Layer layer = Layers.GetLayerByFilename(layerFile);
        Layer.LayerFileStatus_e newState = Layer.LayerFileStatus_e.NewLayer;
        if (layer != null) // already there
        {
            bool bModified = layer.LastModified != fileInfo.LastWriteTime;
            System.Diagnostics.Debug.Assert(!layer.OwnsLock || !bModified);
            if (bModified && !layer.OwnsLock)
            {
                newState = Layer.LayerFileStatus_e.Modified;
            }
            else
            {
                // don't add the non-modified layer to the list
                layer.FileStatus = Layer.LayerFileStatus_e.NotModified;
                continue;
            }
        }

        // NOTE(review): BinaryFormatter deserialization of on-disk files is unsafe
        // for untrusted input and is deprecated in modern .NET — confirm these
        // files can only originate from trusted project directories.
        BinaryFormatter fmt = SerializationHelper.BINARY_FORMATTER;
        try
        {
            // open the layer in read-only mode
            FileStream fs = new FileStream(fileInfo.FullName, FileMode.Open, FileAccess.Read);
            layer = (Layer)fmt.Deserialize(fs);
            fs.Close();

            // make sure there is only one layer of type V3DLayer [#18824]
            if (layer is V3DLayer)
            {
                foreach (Layer other in newLayers)
                    if (other is V3DLayer)
                        throw new Exception("The Layer directory contains more than one Layer of type 'Main Layer'. E.g. '" + layer.LayerFilename + "' and '" + other.LayerFilename + "'.\n\nIgnoring '" + layer.LayerFilename + "'");
            }
        }
        catch (Exception ex)
        {
            // A bad layer file is reported to the user and skipped; loading continues.
            EditorManager.DumpException(ex);
            EditorManager.ShowMessageBox("An exception occurred while loading layer '" + fileInfo.Name + "'\n\nDetailed Message:\n" + ex.ToString(), "Layer loading error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            continue;
        }

        if (fileInfo.FullName.StartsWith(layerDir))
            layer.SetLayerFileNameInternal(fileInfo.Name);
        else
        {
            // this layer is a reference
            string name = Project.MakeRelative(fileInfo.FullName);
            layer.SetLayerFileNameInternal(name);
            layer.IsReference = true;
        }

        layer.UpdateLastModified(fileInfo);
        layer.FileStatus = newState;
        layer.UpdateReadOnlyState(fileInfo);
        newLayers.Add(layer);

        if (progress != null)
            progress.Percentage = fPercentage;
    }

    return bOK;
}
/// <summary>
/// Ungroups the scene's current selection: flattens a flattenable item, undoes a
/// group via an undoable command, or — for a single mesh with no children —
/// splits the mesh into its discrete sub-meshes as separate objects.
/// NOTE(review): async void — exceptions thrown here are unobservable by
/// callers; consider returning Task (confirm existing call sites first).
/// </summary>
public static async void UngroupSelection(this InteractiveScene scene)
{
    var selectedItem = scene.SelectedItem;
    if (selectedItem != null)
    {
        if (selectedItem.CanFlatten)
        {
            selectedItem.Flatten(scene.UndoBuffer);
            scene.SelectedItem = null;
            return;
        }

        bool isGroupItemType = selectedItem.Children.Count > 0;

        // If not a Group ItemType, look for mesh volumes and split into distinct objects if found
        if (isGroupItemType)
        {
            // Create and perform the delete operation
            // Store the operation for undo/redo
            scene.UndoBuffer.AddAndDo(new UngroupCommand(scene, selectedItem));
        }
        else if (!selectedItem.HasChildren() && selectedItem.Mesh != null)
        {
            await ApplicationController.Instance.Tasks.Execute(
                "Ungroup".Localize(),
                null,
                (reporter, cancellationToken) =>
                {
                    var progressStatus = new ProgressStatus();
                    reporter.Report(progressStatus);

                    // clear the selection
                    scene.SelectedItem = null;
                    progressStatus.Status = "Copy".Localize();
                    reporter.Report(progressStatus);

                    // try to cut it up into multiple meshes
                    progressStatus.Status = "Split".Localize();
                    var discreetMeshes = CreateDiscreteMeshes.SplitVolumesIntoMeshes(selectedItem.Mesh, cancellationToken, (double progress0To1, string processingState) =>
                    {
                        // Split work occupies the second half of the progress bar.
                        progressStatus.Progress0To1 = .5 + progress0To1 * .5;
                        progressStatus.Status = processingState;
                        reporter.Report(progressStatus);
                    });

                    if (cancellationToken.IsCancellationRequested)
                    {
                        return (Task.CompletedTask);
                    }

                    if (discreetMeshes.Count == 1)
                    {
                        // restore the selection
                        scene.SelectedItem = selectedItem;
                        // No further processing needed, nothing to ungroup
                        return (Task.CompletedTask);
                    }

                    // build the ungroup list
                    List<IObject3D> addItems = new List<IObject3D>(discreetMeshes.Select(mesh => new Object3D()
                    {
                        Mesh = mesh,
                    }));

                    foreach (var item in addItems)
                    {
                        item.CopyProperties(selectedItem, Object3DPropertyFlags.All);
                        item.Visible = true;
                    }

                    // add and do the undo data
                    scene.UndoBuffer.AddAndDo(new ReplaceCommand(new[] { selectedItem }, addItems));

                    foreach (var item in addItems)
                    {
                        item.MakeNameNonColliding();
                    }

                    return (Task.CompletedTask);
                });
        }

        // leave no selection
        scene.SelectedItem = null;
    }
}
public void UpdateProgress(ProgressStatus progress) { backgroundWorker1.ReportProgress(0, progress); }
public IrtDb AddPeptides(IProgressMonitor monitor, IList <DbIrtPeptide> newPeptides) { IProgressStatus status = new ProgressStatus(Resources.IrtDb_AddPeptides_Adding_peptides); return(AddPeptides(monitor, newPeptides, ref status)); }
// Throws DatabaseOpeningException
/// <summary>
/// Opens the ion mobility library at <paramref name="path"/>.
/// </summary>
/// <param name="path">Path to an existing ion mobility library file; must be non-empty.</param>
/// <param name="loadMonitor">Optional progress monitor (may be null).</param>
/// <returns>The opened library, or null when opening failed and the error was
/// reported through <paramref name="loadMonitor"/>.</returns>
public static IonMobilityDb GetIonMobilityDb(string path, IProgressMonitor loadMonitor)
{
    var status = new ProgressStatus(string.Format(Resources.IonMobilityDb_GetIonMobilityDb_Loading_ion_mobility_library__0_, path));
    if (loadMonitor != null)
    {
        loadMonitor.UpdateProgress(status);
    }

    try
    {
        if (String.IsNullOrEmpty(path))
        {
            throw new DatabaseOpeningException(Resources.IonMobilityDb_GetIonMobilityDb_Please_provide_a_path_to_an_existing_ion_mobility_library_);
        }

        if (!File.Exists(path))
        {
            throw new DatabaseOpeningException(
                string.Format(
                    Resources.IonMobilityDb_GetIonMobilityDb_The_ion_mobility_library_file__0__could_not_be_found__Perhaps_you_did_not_have_sufficient_privileges_to_create_it_,
                    path));
        }

        string message;
        Exception xInner = null;
        try
        {
            //Check for a valid SQLite file and that it has our schema
            //Allow only one thread at a time to read from the same path
            using (var sessionFactory = GetSessionFactory(path))
            {
                lock (sessionFactory)
                {
                    return (new IonMobilityDb(path, sessionFactory).Load(loadMonitor, status));
                }
            }
        }
        catch (UnauthorizedAccessException x)
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_You_do_not_have_privileges_to_access_the_ion_mobility_library_file__0_, path);
            xInner = x;
        }
        catch (DirectoryNotFoundException x)
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_The_path_containing_ion_mobility_library__0__does_not_exist_, path);
            xInner = x;
        }
        catch (FileNotFoundException x)
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_The_ion_mobility_library_file__0__could_not_be_found__Perhaps_you_did_not_have_sufficient_privileges_to_create_it_, path);
            xInner = x;
        }
        catch (Exception x) // SQLiteException is already something of a catch-all, just lump it with the others here
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_The_file__0__is_not_a_valid_ion_mobility_library_file_, path);
            xInner = x;
        }

        // Funnel every low-level failure into the one exception type callers handle.
        throw new DatabaseOpeningException(message, xInner);
    }
    catch (DatabaseOpeningException x)
    {
        if (loadMonitor == null)
        {
            throw;
        }

        // With a monitor present, report the failure instead of propagating it.
        loadMonitor.UpdateProgress(status.ChangeErrorException(x));
        return (null);
    }
}
/// <summary>
/// Performs a CSG boolean of two meshes: 0 = union, 1 = subtract, 2 = intersect.
/// Prefers the external 64-bit libigl_boolean.exe when present, falling back to
/// the built-in polygon mesh CSG operations.
/// </summary>
/// <param name="transformedKeep">First operand, already transformed into the common space.</param>
/// <param name="transformedRemove">Second operand, already transformed into the common space.</param>
/// <param name="opperation">Operation code: 0 = union, 1 = subtract, 2 = intersect.</param>
/// <param name="reporter">Optional progress sink (may be null).</param>
/// <param name="amountPerOperation">Fraction of overall progress this operation represents.</param>
/// <param name="percentCompleted">Overall progress already completed before this operation.</param>
/// <param name="progressStatus">Reusable status object updated while reporting.</param>
/// <param name="cancellationToken">Cancels the built-in CSG computation.</param>
/// <returns>The resulting mesh, or null for an unrecognized operation code.</returns>
public static Mesh Do(Mesh transformedKeep, Mesh transformedRemove, int opperation, IProgress<ProgressStatus> reporter, double amountPerOperation, double percentCompleted, ProgressStatus progressStatus, CancellationToken cancellationToken)
{
    var libiglExe = "libigl_boolean.exe";
    if (File.Exists(libiglExe)
        && IntPtr.Size == 8) // only try to run the improved booleans if we are 64 bit and it is there
    {
        string folderToSaveStlsTo = Path.Combine(ApplicationDataStorage.Instance.ApplicationTempDataPath, "amf_to_stl");

        // Create directory if needed
        Directory.CreateDirectory(folderToSaveStlsTo);

        string stlFileA = Path.Combine(folderToSaveStlsTo, Path.ChangeExtension(Path.GetRandomFileName(), ".stl"));
        string stlFileB = Path.Combine(folderToSaveStlsTo, Path.ChangeExtension(Path.GetRandomFileName(), ".stl"));
        string stlFileResult = Path.Combine(folderToSaveStlsTo, Path.ChangeExtension(Path.GetRandomFileName(), ".stl"));

        try
        {
            StlProcessing.Save(transformedKeep, stlFileA, CancellationToken.None);
            StlProcessing.Save(transformedRemove, stlFileB, CancellationToken.None);

            // wait for files to close
            Thread.Sleep(1000);

            // Map the operation code onto the external tool's operator argument.
            var opperationString = "-";
            switch (opperation)
            {
                case 0:
                    opperationString = "+";
                    break;

                case 1:
                    opperationString = "-";
                    break;

                case 2:
                    opperationString = "&";
                    break;
            }

            // Dispose the process handle when finished (previously leaked).
            using (var slicerProcess = new Process()
            {
                StartInfo = new ProcessStartInfo()
                {
                    Arguments = "{0} {1} {2} {3}".FormatWith(stlFileA, stlFileB, stlFileResult, opperationString),
                    CreateNoWindow = true,
                    WindowStyle = ProcessWindowStyle.Hidden,
                    RedirectStandardError = true,
                    RedirectStandardOutput = true,
                    FileName = libiglExe,
                    UseShellExecute = false
                }
            })
            {
                slicerProcess.Start();
                slicerProcess.WaitForExit();
            }

            // wait for file to close
            Thread.Sleep(1000);

            // load up the result; fall through to the built-in CSG if it failed
            var result = StlProcessing.Load(stlFileResult, CancellationToken.None);
            if (result != null)
            {
                return result;
            }
        }
        finally
        {
            // Clean up the temporary STL files so they do not accumulate (previously leaked).
            TryDeleteTempFile(stlFileA);
            TryDeleteTempFile(stlFileB);
            TryDeleteTempFile(stlFileResult);
        }
    }

    // Built-in CSG fallback. Progress callbacks use null-conditional reporting
    // consistently (the original mixed reporter.Report and reporter?.Report).
    switch (opperation)
    {
        case 0:
            return PolygonMesh.Csg.CsgOperations.Union(transformedKeep,
                transformedRemove,
                (status, progress0To1) =>
                {
                    // Abort if flagged
                    cancellationToken.ThrowIfCancellationRequested();
                    progressStatus.Status = status;
                    progressStatus.Progress0To1 = percentCompleted + amountPerOperation * progress0To1;
                    reporter?.Report(progressStatus);
                },
                cancellationToken);

        case 1:
            return PolygonMesh.Csg.CsgOperations.Subtract(transformedKeep,
                transformedRemove,
                (status, progress0To1) =>
                {
                    // Abort if flagged
                    cancellationToken.ThrowIfCancellationRequested();
                    progressStatus.Status = status;
                    progressStatus.Progress0To1 = percentCompleted + amountPerOperation * progress0To1;
                    reporter?.Report(progressStatus);
                },
                cancellationToken);

        case 2:
            return PolygonMesh.Csg.CsgOperations.Intersect(transformedKeep,
                transformedRemove,
                (status, progress0To1) =>
                {
                    // Abort if flagged
                    cancellationToken.ThrowIfCancellationRequested();
                    progressStatus.Status = status;
                    progressStatus.Progress0To1 = percentCompleted + amountPerOperation * progress0To1;
                    reporter?.Report(progressStatus);
                },
                cancellationToken);
    }

    return null;
}

// Best-effort delete of a temporary file; ignores failures (e.g. the file was
// never created because an earlier step threw).
private static void TryDeleteTempFile(string path)
{
    try
    {
        File.Delete(path);
    }
    catch (Exception)
    {
    }
}
/// <summary>
/// Recomputes this operation's output: for every selected ("paint") child mesh,
/// stores its intersection with each kept sibling and subtracts it from that
/// sibling, replacing the visible children with the results.
/// </summary>
/// <param name="cancellationToken">Cancels the underlying CSG operations.</param>
/// <param name="reporter">Optional progress sink (may be null).</param>
private void SubtractAndReplace(CancellationToken cancellationToken, IProgress<ProgressStatus> reporter)
{
    SourceContainer.Visible = true;
    RemoveAllButSource();

    var parentOfPaintTargets = SourceContainer.DescendantsAndSelfMultipleChildrenFirstOrSelf();

    if (parentOfPaintTargets.Children.Count() < 2)
    {
        // Not enough children to combine; pass the single child through unchanged.
        if (parentOfPaintTargets.Children.Count() == 1)
        {
            this.Children.Add(SourceContainer.Clone());
            SourceContainer.Visible = false;
        }

        return;
    }

    SubtractObject3D_2.CleanUpSelectedChildrenNames(this);

    // Partition children into the selected "paint" set and the kept set.
    var paintObjects = parentOfPaintTargets.Children
        .Where((i) => SelectedChildren
        .Contains(i.ID))
        .SelectMany(c => c.VisibleMeshes())
        .ToList();
    var keepItems = parentOfPaintTargets.Children
        .Where((i) => !SelectedChildren
        .Contains(i.ID));
    var keepVisibleItems = keepItems.SelectMany(c => c.VisibleMeshes()).ToList();

    if (paintObjects.Any() && keepVisibleItems.Any())
    {
        // NOTE(review): two CSG calls run per (keep, paint) pair, but progress is
        // advanced once per pair — confirm this accounting is intentional.
        var totalOperations = paintObjects.Count * keepVisibleItems.Count;
        double amountPerOperation = 1.0 / totalOperations;
        double percentCompleted = 0;

        var progressStatus = new ProgressStatus
        {
            Status = "Do CSG"
        };

        foreach (var keep in keepVisibleItems)
        {
            var keepResultsMesh = keep.Mesh;
            var keepWorldMatrix = keep.WorldMatrix(SourceContainer);

            foreach (var paint in paintObjects)
            {
                // Intersection: the part of the kept mesh covered by the paint mesh.
                Mesh paintMesh = BooleanProcessing.Do(keepResultsMesh,
                    keepWorldMatrix,
                    // paint data
                    paint.Mesh,
                    paint.WorldMatrix(SourceContainer),
                    // operation type
                    2,
                    // reporting data
                    reporter,
                    amountPerOperation,
                    percentCompleted,
                    progressStatus,
                    cancellationToken);

                // Subtraction: remove the paint mesh from the running kept result.
                keepResultsMesh = BooleanProcessing.Do(keepResultsMesh,
                    keepWorldMatrix,
                    // point data
                    paint.Mesh,
                    paint.WorldMatrix(SourceContainer),
                    // operation type
                    1,
                    // reporting data
                    reporter,
                    amountPerOperation,
                    percentCompleted,
                    progressStatus,
                    cancellationToken);

                // after the first time we get a result the results mesh is in the right coordinate space
                keepWorldMatrix = Matrix4X4.Identity;

                // store our intersection (paint) results mesh
                var paintResultsItem = new Object3D()
                {
                    Mesh = paintMesh,
                    Visible = false,
                    OwnerID = paint.ID
                };
                // copy all the properties but the matrix
                paintResultsItem.CopyWorldProperties(paint, SourceContainer, Object3DPropertyFlags.All & (~(Object3DPropertyFlags.Matrix | Object3DPropertyFlags.Visible)));
                // and add it to this
                this.Children.Add(paintResultsItem);

                // report our progress
                percentCompleted += amountPerOperation;
                progressStatus.Progress0To1 = percentCompleted;
                reporter?.Report(progressStatus);
            }

            // store our results mesh
            var keepResultsItem = new Object3D()
            {
                Mesh = keepResultsMesh,
                Visible = false,
                OwnerID = keep.ID
            };
            // copy all the properties but the matrix
            keepResultsItem.CopyWorldProperties(keep, SourceContainer, Object3DPropertyFlags.All & (~(Object3DPropertyFlags.Matrix | Object3DPropertyFlags.Visible)));
            // and add it to this
            this.Children.Add(keepResultsItem);
        }

        foreach (var child in Children)
        {
            child.Visible = true;
        }

        SourceContainer.Visible = false;
    }
}