public ProgressBar(GraphicsDevice device, Rectangle bounds)
             : base(device, bounds)
        {
            InitEvents();

            data = new ProgressData(0);
            BorderStyle = BorderStyle.FixedSingle;
            ForeColor = DefaultForeColor;
        }
Example #2
        public Slider(GraphicsDevice device, Rectangle bounds)
             : base(device, bounds)
        {
            InitEvents();

            SliderBarDimentions = new Rectangle(bounds.X, bounds.Y, Bounds.Width / 10, Bounds.Height);
            data = new ProgressData(0);
            BorderStyle = BorderStyle.FixedSingle;
            ForeColor = Color.Gray;
        }
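Both constructors take only a GraphicsDevice and a bounding Rectangle. A minimal sketch of a call site, assuming an existing device (the graphicsDevice variable and the bounds are hypothetical):

        // Hypothetical call site for the two constructors shown above.
        var progressBar = new ProgressBar(graphicsDevice, new Rectangle(10, 10, 200, 24));
        var slider      = new Slider(graphicsDevice, new Rectangle(10, 44, 200, 24));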
Example #3
    private void updateSlotIndividualParadigm(ProgressData.SlotWrapper[] slots)
    {
        if (this.SlotId >= slots.Length)
        {
            SlotObject.sprite = this.LockedSlotSprite;
            SlotContentsObject.sprite = this.EmptySlotSprite;
            return;
        }

        SlotObject.sprite = this.UnlockedSlotSprite;
        configureSprite(slots[this.SlotId].SlotType);
    }
 /// <devdoc>
 ///    <para>Sets up the async delegates; these must be created on every instance when it is used asynchronously.</para>
 /// </devdoc>
 private void InitWebClientAsync() {
     if (!m_InitWebClientAsync) {
         openReadOperationCompleted = new SendOrPostCallback(OpenReadOperationCompleted);
         openWriteOperationCompleted = new SendOrPostCallback(OpenWriteOperationCompleted);
         downloadStringOperationCompleted = new SendOrPostCallback(DownloadStringOperationCompleted);
         downloadDataOperationCompleted = new SendOrPostCallback(DownloadDataOperationCompleted);
         downloadFileOperationCompleted = new SendOrPostCallback(DownloadFileOperationCompleted);
         uploadStringOperationCompleted = new SendOrPostCallback(UploadStringOperationCompleted);
         uploadDataOperationCompleted = new SendOrPostCallback(UploadDataOperationCompleted);
         uploadFileOperationCompleted = new SendOrPostCallback(UploadFileOperationCompleted);
         uploadValuesOperationCompleted = new SendOrPostCallback(UploadValuesOperationCompleted);
         reportDownloadProgressChanged = new SendOrPostCallback(ReportDownloadProgressChanged);
         reportUploadProgressChanged = new SendOrPostCallback(ReportUploadProgressChanged);
         m_Progress = new ProgressData();
         m_InitWebClientAsync = true;
     }
 }
        public void RefineAbundance(double scoreThreshold = -30, IProgress <ProgressData> progressReporter = null)
        {
            if (_alignedFeatures == null)
            {
                return;
            }

            progressReporter = progressReporter ?? new Progress <ProgressData>();

            var progressData = new ProgressData {
                IsPartialRange = true
            };

            for (var i = 0; i < CountDatasets; i++)
            {
                progressData.MaxPercentage = ((i + 1) * 100.0) / this.CountDatasets;
                var subProgress = new Progress <ProgressData>(pd => progressReporter.Report(progressData.UpdatePercent(pd.Percent)));
                FillMissingFeatures(i, scoreThreshold, subProgress);
                //Console.WriteLine("{0} has been processed...", RawFileList[i]);
            }
        }
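The loop above maps each dataset onto a slice of the overall range through IsPartialRange and MaxPercentage, so the caller sees a single 0-100% stream. A minimal sketch of a call site, assuming the Percent property used in the snippet (the aligner variable is hypothetical):

            // Hypothetical call site: print the combined progress while abundances are refined.
            var reporter = new Progress<ProgressData>(pd => Console.WriteLine($"Refining: {pd.Percent:F1}%"));
            aligner.RefineAbundance(scoreThreshold: -30, progressReporter: reporter);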
Example #6
        private static MeshGroup ReadObject(XmlReader xmlTree, double scale, ProgressData progressData)
        {
            MeshGroup meshGroup = new MeshGroup();

            while (xmlTree.Read())
            {
                if (xmlTree.Name == "mesh")
                {
                    using (XmlReader meshTree = xmlTree.ReadSubtree())
                    {
                        ReadMesh(meshTree, meshGroup, scale, progressData);
                        if (progressData.LoadCanceled)
                        {
                            return(null);
                        }
                    }
                }
            }

            return(meshGroup);
        }
Example #7
        private SequenceTagGenerator GetSequenceTagGenerator(CancellationToken?cancellationToken = null, IProgress <ProgressData> progress = null)
        {
            var sequenceTagGen = new SequenceTagGenerator(_run, new Tolerance(5));
            var scanNums       = _ms2ScanNums;

            var progData = new ProgressData(progress)
            {
                Status = "Generating sequence tags"
            };

            var sw = new Stopwatch();

            // Rescore and Estimate #proteins for GF calculation
            long estimatedProteins = scanNums.Length;

            Console.WriteLine(@"Number of spectra: " + estimatedProteins);
            var numProteins = 0;
            var lastUpdate  = DateTime.MinValue; // Force original update of 0%

            sw.Reset();
            sw.Start();

            var pfeOptions = new ParallelOptions
            {
                MaxDegreeOfParallelism = MaxNumThreads,
                CancellationToken      = cancellationToken ?? CancellationToken.None
            };

            Parallel.ForEach(scanNums, pfeOptions, scanNum =>
            {
                sequenceTagGen.Generate(scanNum);
                SearchProgressReport(ref numProteins, ref lastUpdate, estimatedProteins, sw, progData,
                                     "spectra");
            });

            progData.StatusInternal = string.Empty;
            progData.Report(100.0);
            Console.WriteLine(@"Generated sequence tags: " + sequenceTagGen.NumberOfGeneratedTags());
            return(sequenceTagGen);
        }
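Because both parameters are optional, a caller inside the same class can bound the run with a CancellationToken and observe the Status string set above. A minimal sketch (hypothetical call site):

            // Hypothetical call site: cancel tag generation after five minutes and log status updates.
            using (var cts = new CancellationTokenSource(TimeSpan.FromMinutes(5)))
            {
                var progress = new Progress<ProgressData>(pd => Console.WriteLine(pd.Status));
                var tagGenerator = GetSequenceTagGenerator(cts.Token, progress);
            }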
Example #8
 public void OnCollide(LocalEventNotifier.Event localEvent)
 {
     foreach (GameObject hit in ((CollisionEvent)localEvent).Hits)
     {
         if (((1 << hit.layer) & this.PickupLayer) != 0)
         {
             WeaponPickup pickup = hit.GetComponent <WeaponPickup>();
             if (pickup != null)
             {
                 if (pickup.PickupContents.Type == WeaponPickup.PickupType.WeaponSlot)
                 {
                     WeaponData.Slot          slotType   = (WeaponData.Slot)pickup.PickupContents.Parameter;
                     ProgressData.SmartSlot[] smartSlots = ProgressData.SmartSlotsFromWrappers(this.Slots.ToArray());
                     bool ok = true;
                     for (int i = 0; i < smartSlots.Length; ++i)
                     {
                         if (smartSlots[i].SlotType == slotType)
                         {
                             ok = smartSlots[i].Level < WeaponData.GetMaxSlotsByType()[slotType] || smartSlots[i].Ammo < WeaponData.GetSlotDurationsByType()[slotType];
                             break;
                         }
                     }
                     if (ok)
                     {
                         pickupWeaponSlot(slotType);
                         Destroy(hit);
                     }
                 }
                 else
                 {
                     if (_damagable.Health < ProgressData.MAX_HEALTH)
                     {
                         _damagable.Heal(pickup.PickupContents.Parameter);
                         Destroy(hit);
                     }
                 }
             }
         }
     }
 }
        /// <summary>
        /// Gets the chunk file list.
        /// </summary>
        /// <param name="chunkListUrl">The chunk list URL.</param>
        /// <param name="progressIndicator">The progress indicator.</param>
        /// <returns></returns>
        public Queue <Uri> GetChunkFileList(string chunkListUrl, IProgress <IProgressData> progressIndicator)
        {
            IProgressData progressData = new ProgressData(0);

            progressIndicator.Report(progressData);

            int    lastSlashLocation = chunkListUrl.LastIndexOf(ForwardSlash);
            string baseAddress       = chunkListUrl.Remove(lastSlashLocation + 1);

            _baseAddress = new Uri(baseAddress);

            Task <string> downloadTask = _webClient.DownloadStringTaskAsync(chunkListUrl);

            string chunkFileList = downloadTask.Result;

            string[] allFileLines = chunkFileList.Split(NewLineCharacter);

            progressData.PercentDone = 100;
            progressIndicator.Report(progressData);

            return(ProcessUnparsedFileIntoUris(allFileLines));
        }
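GetChunkFileList reports 0% before the download and 100% afterwards, and it blocks on downloadTask.Result, so it is meant to be called from code that can wait. A minimal sketch of a caller, assuming the PercentDone property used above (the downloader variable and URL are hypothetical):

            // Hypothetical usage: log chunk-list progress and collect the chunk URIs.
            var progress = new Progress<IProgressData>(pd => Console.WriteLine($"Chunk list: {pd.PercentDone}%"));
            Queue<Uri> chunkUris = downloader.GetChunkFileList("http://example.com/media/chunklist", progress);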
Example #10
        private void OnSolutionCrawlerProgressChanged(object sender, ProgressData progressData)
        {
            // there is no concurrent call to this method since ISolutionCrawlerProgressReporter will serialize all
            // events to preserve event ordering
            switch (progressData.Status)
            {
            case ProgressStatus.Started:
                StartedOrStopped(started: true);
                break;

            case ProgressStatus.Updated:
                ProgressUpdated(progressData.FilePathOpt);
                break;

            case ProgressStatus.Stoped:
                StartedOrStopped(started: false);
                break;

            default:
                throw ExceptionUtilities.UnexpectedValue(progressData.Status);
            }
        }
Example #11
 //called when the scene changes; if anything was modified, the changes are written back to the save file
 public void SaveData()
 {
     if (somethingWasChanged)
     {
         if (File.Exists(Application.persistentDataPath + "/progressData.dat"))
         {
             BinaryFormatter bf         = new BinaryFormatter();
             FileStream      file       = File.Open(Application.persistentDataPath + "/progressData.dat", FileMode.Open);
             ProgressData    dataToSave = new ProgressData();
             dataToSave = (ProgressData)bf.Deserialize(file);
             file.Close();
             dataToSave.playerSpeedPoints    = pointsInStatistisc[0];
             dataToSave.scorMultiplierPoints = pointsInStatistisc[2];
             dataToSave.starsToSave          = ownedStars;
             File.Delete(Application.persistentDataPath + "/progressData.dat");
             file = File.Create(Application.persistentDataPath + "/progressData.dat");
             bf.Serialize(file, dataToSave);
             file.Close();
         }
         Debug.Log("A change was made, saving.");
     }
 }
Example #12
 // Start is called before the first frame update
 void Start()
 {
     DontDestroyOnLoad(gameObject);
     if (File.Exists(Application.persistentDataPath + savePath))
     {
         progressData = JsonUtility.FromJson <ProgressData>(File.ReadAllText(Application.persistentDataPath + savePath));
     }
     else
     {
         progressData = new ProgressData();
         SaveProgressData();
     }
     if (DEBUG_ResetPreference)
     {
         SaveDataToPrefs();
     }
     else
     {
         GetDataFromPrefs();
     }
     SceneManager.LoadSceneAsync(1);
 }
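SaveProgressData is referenced above but not shown. Given the JsonUtility/File pattern used for loading, a plausible counterpart looks like this (a hypothetical sketch, not necessarily the project's actual implementation):

 // Hypothetical counterpart to the load path above: serialize progressData back to the same file.
 void SaveProgressData()
 {
     File.WriteAllText(Application.persistentDataPath + savePath, JsonUtility.ToJson(progressData));
 }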
Example #13
        public static async Task <byte[]> DownloadDataAsync(string url, ProgressData progress = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            try
            {
                byte[] bytes = null;
                using (var wc = new WebClient())
                {
                    wc.DownloadProgressChanged += (s, e) => progress?.Report(e.BytesReceived, e.TotalBytesToReceive);
                    wc.DownloadDataCompleted   += (s, e) =>
                    {
                        // Accessing e.Result throws when the download failed or was canceled,
                        // so capture the failure and leave the handler before touching it.
                        if (e.Error != null || e.Cancelled)
                        {
                            bytes = null;
                            return;
                        }

                        bytes = e.Result;
                    };

                    wc.DownloadDataAsync(new Uri(url));

                    while (wc.IsBusy)
                    {
                        if (cancellationToken.IsCancellationRequested)
                        {
                            throw new TaskCanceledException("Operation canceled by user.");
                        }

                        await Task.Delay(500);
                    }
                }

                return(bytes);
            }
            catch
            {
                return(null);
            }
        }
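A minimal sketch of a call site for the helper above, inside an async method. It assumes the ProgressData type used in the snippet exposes a parameterless constructor and the Report(received, total) method wired to DownloadProgressChanged; the URL and timeout are illustrative:

            // Hypothetical usage: download with progress reporting and a 60-second cancellation window.
            var progress = new ProgressData();   // parameterless constructor assumed
            using (var cts = new CancellationTokenSource(TimeSpan.FromSeconds(60)))
            {
                byte[] data = await DownloadDataAsync("https://example.com/file.bin", progress, cts.Token);
                Console.WriteLine(data == null ? "Download failed or was canceled." : $"Downloaded {data.Length} bytes.");
            }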
    public static void Initialize()
    {
        CleanUp();

        if (RuntimeData.system == null)
        {
            RuntimeData.GenerateNewSystem();
        }

        RuntimeData.system.Instantiate();

        if (!RuntimeData.system.pointsOfInterest.Contains(PlayerData.fleet))
        {
            RuntimeData.system.AddPointOfInterest(PlayerData.fleet);
        }

        _random = new Random(RuntimeData.system.seed);

        //add slot for player

        _uiManager = OverworldUIManager.getInstance;
        _uiManager.Initialize();

        _cameraManager = OverworldCameraManager.getInstance;
        _cameraManager.JumpTo(PlayerData.fleet.location);

        ToolbarManager.getInstance.Initialize();

        //loading complete callback
        if (!ProgressData.Evaluate(ProgressPoint.ReceivedStartEvent))
        {
            DialogueUIManager.getInstance.DisplayDialogueEvent(EventDB.GetStart());
        }

        UpdateSpawnChance();
        EndCurrentTurn();
    }
Example #15
        }     // Worker_ProgressChanged

        private void NotifyScanResult(ProgressData progress, bool isSkipped)
        {
            if (ChannelScanResult == null)
            {
                progress.Service.IsInactive = progress.IsInactive;
                return;
            } // if

            var e = new ScanResultEventArgs()
            {
                IsInactive  = progress.IsInactive,
                WasInactive = progress.WasInactive,
                IsSkipped   = isSkipped,
                Service     = progress.Service,
            };

            ChannelScanResult(this, e);

            if ((e.WasInactive != e.IsInactive) && (!e.IsInList))
            {
                RefreshNeeded = true;
            } // if
        }     // NotifyScanResult
Example #16
 private void SetProgress(ProgressData data)
 {
     try {
         if (!InvokeRequired)
         {
             progress.Value = (int)data.Percentage;
             if (spi.Checked && _xsvfFlasher == null)
             {
                 statuslbl.Text = string.Format("Processed block 0x{0:X} of 0x{1:X}", data.Current, data.Maximum);
             }
             else
             {
                 statuslbl.Text = string.Format("Processed {0} of {1} ({2}/s)", GetSizeReadable(data.Current), GetSizeReadable(data.Maximum), GetSizeReadable((long)(data.Current / _sw.Elapsed.TotalSeconds)));
             }
         }
         else
         {
             Invoke(new MethodInvoker(() => SetProgress(data)));
         }
     }
     catch {
     }
 }
        ProgressData AddProgress(int row, int column, int columnSpan = 1)
        {
            var textBlock = new TextBlock {
                HorizontalAlignment = HorizontalAlignment.Center, TextTrimming = TextTrimming.CharacterEllipsis
            };
            var progressBar = new ProgressBar {
                Height = 10, Minimum = 0, Margin = new Thickness(10, 0, 10, 0)
            };

            var sp = new StackPanel();

            sp.Children.Add(textBlock);
            sp.Children.Add(progressBar);
            Grid.SetRow(sp, row);
            Grid.SetColumn(sp, column);
            Grid.SetColumnSpan(sp, columnSpan);
            progressGrid.Children.Add(sp);

            var progressData = new ProgressData(sp, textBlock, progressBar);

            ProgressDatas.Add(progressData);
            return(progressData);
        }
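AddProgress assumes a Grid named progressGrid whose rows and columns are already defined (typically in XAML), and a ProgressData constructor that wraps the panel, label, and bar. A hypothetical call that spans two columns:

            // Hypothetical usage: add a labelled progress row at grid row 0, spanning columns 0 and 1.
            // The members exposed by the returned ProgressData wrapper are not shown in the snippet above.
            var downloadProgress = AddProgress(row: 0, column: 0, columnSpan: 2);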
Example #18
        private void UpdateUI(ProgressData progressData)
        {
            if (progressData.Status == ProgressStatus.Stopped)
            {
                StopTaskCenter();
                return;
            }

            // Update the pending item count if the progress data specifies a value.
            if (progressData.PendingItemCount.HasValue)
            {
                _lastPendingItemCount = progressData.PendingItemCount.Value;
            }

            // Start the task center task if not already running.
            if (_taskHandler == null)
            {
                // Register a new task handler to handle a new task center task.
                // Each task handler can only register one task, so we must create a new one each time we start.
                _taskHandler = _taskCenterService.PreRegister(_options, data: default);

                // Create a new non-completed task to be tracked by the task handler.
                _taskCenterTask = new TaskCompletionSource <VoidResult>();
                _taskHandler.RegisterTask(_taskCenterTask.Task);
            }

            var statusMessage = progressData.Status == ProgressStatus.Paused
                ? ServicesVSResources.Paused_0_tasks_in_queue
                : ServicesVSResources.Evaluating_0_tasks_in_queue;

            _taskHandler.Progress.Report(new TaskProgressData
            {
                ProgressText    = string.Format(statusMessage, _lastPendingItemCount),
                CanBeCanceled   = false,
                PercentComplete = null,
            });
        }
Example #19
        private void WKHtmltopdfProcessOnErrorDataReceived(DataReceivedEventArgs e, WKHtmltopdfParameters parameters, ref Exception exception, List <string> message)
        {
            var totalMediaDuration = new TimeSpan(0, 0, 120);

            if (e.Data == null)
            {
                return;
            }

            try
            {
                message.Insert(0, e.Data);
                //if(parameters.InputFile!=null)
                //{

                //}
                var progressData = new ProgressData(TimeSpan.FromSeconds(10), TimeSpan.FromMinutes(10), null, null, null, null);
                OnProgressChanged(new ConversionProgressEventArgs(progressData, parameters.InputFiles, parameters.OutputFile));
            }
            catch (Exception ex)
            {
                exception = ex;
            }
        }
Example #20
        IEnumerator DoProgress()
        {
            WaitForEndOfFrame wait = new WaitForEndOfFrame();

            while (mProgress.Count > 0)
            {
                ProgressData curProg = mProgress.Dequeue();

                mCurTrans = curProg.transition;
                mCurTrans.Prepare();

                if (curProg.call != null)
                {
                    curProg.call(mCurTrans, Action.Begin);
                }

                while (!mCurTrans.isDone)
                {
                    //Debug.Log("playing: "+trans);
                    RestorePlayer();
                    yield return(wait);
                }

                mPrevTrans = mCurTrans;

                if (curProg.call != null)
                {
                    curProg.call(mCurTrans, Action.End);
                }

                mCurTrans  = null;
                mCurPlayer = null;
            }

            mProgressRoutine = null;
        }
Example #21
 public SimilarityFinder(int minClusterSize, ProgressData progressData)
 {
     _minClusterSize = minClusterSize;
     _progressData   = progressData;
 }
Example #22
 private void ProgressChanged(ProgressData p)
 {
     SetSliders();
 }
Example #23
        private async Task <List <Node> > GetLeafNodesAsync(IReadOnlyCollection <IClusterableMatch> clusterableMatches, IReadOnlyDictionary <int, float[]> matrix, IDistanceMetric distanceMetric, ProgressData progressData)
        {
            var average = clusterableMatches.Average(match => match.Coords.Count);

            progressData.Reset($"Calculating coordinates for {clusterableMatches.Count} matches (average {average:N0} shared matches per match)...", clusterableMatches.Count);

            var leafNodes = await Task.Run(() =>
            {
                return(clusterableMatches
                       .Where(match => matrix.ContainsKey(match.Index))
                       .Select(match => new LeafNode(match.Index, matrix[match.Index], distanceMetric))
                       .ToList());
            });

            progressData.Reset($"Finding closest pairwise distances for {clusterableMatches.Count} matches (average {average:N0} shared matches per match)...", clusterableMatches.Count);

            await CalculateNeighborsAsync(leafNodes, leafNodes, distanceMetric, progressData);

            var result = leafNodes.ToList <Node>();

            progressData.Reset();
            return(result);
        }
Example #24
        private async Task <List <ClusterNode> > ClusterAsync(IReadOnlyCollection <IClusterableMatch> clusterableMatches, List <IClusterableMatch> immediateFamily, IReadOnlyDictionary <int, float[]> matrix, ProgressData progressData)
        {
            var distanceMetric = _distanceMetricFactory(immediateFamily);

            var matchNodes = await GetLeafNodesAsync(clusterableMatches, matrix, distanceMetric, progressData).ConfigureAwait(false);

            var nodes = await BuildClustersAsync(matchNodes, distanceMetric, progressData).ConfigureAwait(false);

            return(nodes);
        }
		static void ProgressWork (SvnProgressEventArgs e, ProgressData data, IProgressMonitor monitor)
		{
			if (monitor == null)
				return;

			int currentProgress = (int)e.Progress;
			if (currentProgress == 0)
				return;

			int totalProgress = (int)e.TotalProgress;
			if (totalProgress != -1 && currentProgress >= totalProgress) {
				data.LogTimer.Close ();
				return;
			}

			data.Bytes = currentProgress;
			if (data.LogTimer.Enabled)
				return;

			data.LogTimer.Interval = 1000;
			data.LogTimer.Elapsed += delegate {
				data.Seconds += 1;
				monitor.Log.WriteLine ("{0} bytes in {1} seconds", data.Bytes, data.Seconds);
			};
			data.LogTimer.Start ();
		}
Example #26
        private void SetProgress(ProgressData data)
        {
            try {
                if(!InvokeRequired) {
                    progress.Value = (int) data.Percentage;
                    if(spi.Checked && _xsvfFlasher == null)
                        statuslbl.Text = string.Format("Processed block 0x{0:X} of 0x{1:X}", data.Current, data.Maximum);
                    else {
                        statuslbl.Text = string.Format("Processed {0} of {1} ({2}/s)", GetSizeReadable(data.Current), GetSizeReadable(data.Maximum), GetSizeReadable((long) (data.Current / _sw.Elapsed.TotalSeconds)));
                    }

                }
                else
                    Invoke(new MethodInvoker(() => SetProgress(data)));
            }
            catch {
            }
        }
 private void postProgressChanged(AsyncOperation asyncOp, ProgressData progress)
 {
     if ((asyncOp != null) && progress.TotalBytesToSend > 0)
     {
         var _pct = (int)((progress.BytesSent * 100) / progress.TotalBytesToSend);
         asyncOp.Post(this.m_UploadProgressChanged, new UploadFileProgressEventArgs(_pct, asyncOp.UserSuppliedState, progress.BytesSent, progress.TotalBytesToSend));
     }
 }
Example #28
		void BindMonitor (SvnClientArgs args, IProgressMonitor monitor)
		{
			notifyData = new NotifData ();
			progressData = new ProgressData ();

			updateMonitor = monitor;
		}
 private void PostProgressChanged(AsyncOperation asyncOp, ProgressData progress) {
     if (asyncOp != null && progress.BytesSent + progress.BytesReceived > 0)
     {
         int progressPercentage;
         if (progress.HasUploadPhase)
         {
             if (progress.TotalBytesToReceive < 0 && progress.BytesReceived == 0)
             {
                 progressPercentage = progress.TotalBytesToSend < 0 ? 0 : progress.TotalBytesToSend == 0 ? 50 : (int)((50 * progress.BytesSent) / progress.TotalBytesToSend);
             }
             else
             {
                 progressPercentage = progress.TotalBytesToSend < 0 ? 50 : progress.TotalBytesToReceive == 0 ? 100 : (int) ((50 * progress.BytesReceived) / progress.TotalBytesToReceive + 50);
             }
             asyncOp.Post(reportUploadProgressChanged, new UploadProgressChangedEventArgs(progressPercentage, asyncOp.UserSuppliedState, progress.BytesSent, progress.TotalBytesToSend, progress.BytesReceived, progress.TotalBytesToReceive));
         }
         else
         {
             progressPercentage = progress.TotalBytesToReceive < 0 ? 0 : progress.TotalBytesToReceive == 0 ? 100 : (int) ((100 * progress.BytesReceived) / progress.TotalBytesToReceive);
             asyncOp.Post(reportDownloadProgressChanged, new DownloadProgressChangedEventArgs(progressPercentage, asyncOp.UserSuppliedState, progress.BytesReceived, progress.TotalBytesToReceive));
         }
     }
 }
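The arithmetic above splits the overall range in half between the two phases: for example, while an upload is in flight with TotalBytesToSend = 2,000, BytesSent = 1,000, nothing received yet and the response size still unknown, the reported percentage is (50 * 1,000) / 2,000 = 25%; once bytes start arriving, the same transfer is mapped onto the 50-100% band via (50 * BytesReceived) / TotalBytesToReceive + 50.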
Example #30
		IntPtr TryStartOperation (IProgressMonitor monitor)
		{
			lock (sync) {
				if (inProgress)
					throw new SubversionException ("Another Subversion operation is already in progress.");
				inProgress = true;
				updatemonitor = monitor;
				progressData = new ProgressData ();
				return newpool (pool);
			}
		}
Example #31
        public UploadResults PostImage(Image image, ImageFormat format)
        {
            MemoryStream imageDataStream = new MemoryStream();
            image.Save(imageDataStream, format);

            // get the raw bytes
            byte[] imageDataBytes;
            imageDataBytes = imageDataStream.ToArray();

            // construct the post string
            string base64img = System.Convert.ToBase64String(imageDataBytes);
            StringBuilder sb = new StringBuilder();

            for (int i = 0; i < base64img.Length; i += MAX_URI_LENGTH)
            {
                sb.Append(Uri.EscapeDataString(base64img.Substring(i, Math.Min(MAX_URI_LENGTH, base64img.Length - i))));
            }

            string uploadRequestString = "image=" + sb.ToString() + "&key=" + apiKey;

            HttpWebRequest webRequest = (HttpWebRequest)WebRequest.Create(POST_URL);
            webRequest.Method = "POST";
            webRequest.ContentType = "application/x-www-form-urlencoded";
            webRequest.ServicePoint.Expect100Continue = false;

            Stream reqStream = webRequest.GetRequestStream();
            byte[] reqBuffer = Encoding.UTF8.GetBytes(uploadRequestString);

            // send the bytes in chunks so we can check the progress
            ProgressData progress = new ProgressData();
            progress.max_value = reqBuffer.Length;
            progress.value = 0;
            for (int i = 0; i < reqBuffer.Length; i += UPDATE_CHUNKS)
            {
                // the final chunk may be shorter; Math.Min also avoids writing a zero-length chunk
                // when the buffer length is an exact multiple of UPDATE_CHUNKS
                int size = Math.Min(UPDATE_CHUNKS, reqBuffer.Length - i);
                reqStream.Write(reqBuffer, i, size);

                // update the progress
                progress.value = i + size;
                if (_uploadprogressUpdateProperty != null)
                    _uploadprogressUpdateProperty(progress);
            }
            reqStream.Close();

            WebResponse response = webRequest.GetResponse();
            Stream responseStream = response.GetResponseStream();
            StreamReader responseReader = new StreamReader(responseStream);

            string responseString = responseReader.ReadToEnd();

            UploadResults results = ParseResponse(responseString);
            if (_uploadCompleteProperty != null)
                _uploadCompleteProperty(results);

            return results;
        }
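A minimal sketch of a call site for PostImage; the uploader instance is hypothetical and the members of UploadResults are not shown above:

            // Hypothetical usage: upload a PNG screenshot through the chunked POST shown above.
            using (var image = Image.FromFile("screenshot.png"))
            {
                UploadResults results = uploader.PostImage(image, ImageFormat.Png);
            }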
 private void initUploadClientState()
 {
     if (!this.m_InitUploadClientAsync)
     {
         this.m_UploadOperationCompleted = new SendOrPostCallback(this.OnUploadCompleted);
         this.m_UploadProgressChanged = new SendOrPostCallback(this.OnUploadProgressChanged);
         this.m_ProgressData = new ProgressData();
         this.m_InitUploadClientAsync = true;
     }
 }
		IntPtr TryStartOperation (ProgressMonitor monitor)
		{
			Monitor.Enter (svn);
			updatemonitor = monitor;
			progressData = new ProgressData ();
			return newpool (pool);
		}
Example #34
		IntPtr TryStartOperation (IProgressMonitor monitor)
		{
			lock (sync) {
				if (inProgress) {
					var se = new SubversionException ("Another Subversion operation is already in progress.");
					se.Data.Add ("OldStacktrace", oldStacktrace);
					se.Data.Add ("CurrentStackTrace", Environment.StackTrace);
					throw se;
				}
				oldStacktrace = Environment.StackTrace;
				inProgress = true;
				updatemonitor = monitor;
				progressData = new ProgressData ();
				return newpool (pool);
			}
		}
 internal DownloadBitsState(WebRequest request, Stream writeStream, CompletionDelegate completionDelegate, AsyncOperation asyncOp, ProgressData progress, WebClient webClient) {
     WriteStream = writeStream;
     Request = request;
     AsyncOp = asyncOp;
     CompletionDelegate = completionDelegate;
     WebClient = webClient;
     Progress = progress;
 }
		static void ProgressWork (SvnProgressEventArgs e, ProgressData data, IProgressMonitor monitor)
		{
			if (monitor == null)
				return;

			long currentProgress = e.Progress;
			if (currentProgress <= data.KBytes) {
				if (data.SavedProgress < data.KBytes) {
					data.SavedProgress += data.KBytes;
				}
				return;
			}

			long totalProgress = e.TotalProgress;
			if (totalProgress != -1 && currentProgress >= totalProgress) {
				data.LogTimer.Dispose ();
				return;
			}

			data.Remainder += currentProgress % 1024;
			if (data.Remainder >= 1024) {
				data.SavedProgress += data.Remainder / 1024;
				data.Remainder = data.Remainder % 1024;
			}

			data.KBytes = data.SavedProgress + currentProgress / 1024;
			if (data.LogTimer.Enabled)
				return;

			data.LogTimer.Interval = 1000;
			data.LogTimer.Elapsed += delegate {
				data.Seconds += 1;
				monitor.Log.WriteLine ("Transferred {0} in {1} seconds.", BytesToSize (data.KBytes), data.Seconds);
			};
			data.LogTimer.Start ();
		}
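For instance, assuming the counters start at zero, a first call reporting e.Progress = 2,600 bytes with an unknown total leaves Remainder at 552 (2,600 mod 1,024), sets KBytes to 2, and starts the one-second timer that logs the transferred size.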
 string IFileSystem.GetFileContent(string filename)
 {
     position = position.CreateIncrementalPosition();
     progress.Report(ProgressData.InProgress(position));
     return(filesystem.GetFileContent(filename));
 }
Example #38
 private void updateSlotLevelParadigm(ProgressData.SlotWrapper[] slots)
 {
     ProgressData.SmartSlot smartSlot = ProgressData.GetSmartSlot(slots, this.SlotId);
     updateSlotHelper(smartSlot.SlotType, smartSlot.Ammo, smartSlot.Level);
 }
		void BindMonitor (IProgressMonitor monitor)
		{
			notifyData = new NotifData ();
			progressData = new ProgressData ();

			updateMonitor = monitor;
		}
Example #40
        /// <summary>
        /// Main method for training
        /// </summary>
        /// <param name="trainer"></param>
        /// <param name="network"></param>
        /// <param name="trParams"></param>
        /// <param name="miniBatchSource"></param>
        /// <param name="device"></param>
        /// <param name="token"></param>
        /// <param name="progress"></param>
        /// <param name="modelCheckPoint"></param>
        /// <returns></returns>
        public override TrainResult Train(Trainer trainer, Function network, TrainingParameters trParams,
                                          MinibatchSourceEx miniBatchSource, DeviceDescriptor device, CancellationToken token, TrainingProgress progress, string modelCheckPoint, string historyPath)
        {
            try
            {
                //create the trainer result;
                //this variable indicates how the training process ended:
                //completed, stopped, or crashed
                var trainResult = new TrainResult();
                var historyFile = "";
                //create the training-process evaluation collection;
                //each iteration stores the evaluation value for the training and validation sets, along with the model
                m_ModelEvaluations = new List <Tuple <double, double, string> >();

                //check whether the goal is to minimize (error) or maximize (accuracy) the evaluation function
                bool isMinimize = StatMetrics.IsGoalToMinimize(trainer.EvaluationFunction());

                //setup first iteration
                if (m_trainingHistory == null)
                {
                    m_trainingHistory = new List <Tuple <int, float, float, float, float> >();
                }
                //when continuing a previous training run, the iteration counter must resume from the last recorded epoch
                int epoch = (m_trainingHistory.Count > 0)? m_trainingHistory.Last().Item1 + 1:1;

                //define progressData
                ProgressData prData = null;

                //define helper variable collection
                var vars = InputVariables.Union(OutputVariables).ToList();

                //training process
                while (true)
                {
                    //get mini batch data
                    var args = miniBatchSource.GetNextMinibatch(trParams.BatchSize, device);

                    var arguments = MinibatchSourceEx.ToMinibatchData(args, vars, miniBatchSource.Type);
                    //
                    trainer.TrainMinibatch(arguments, device);

                    //make progress
                    if (args.Any(a => a.Value.sweepEnd))
                    {
                        //check the progress of the training process
                        prData = progressTraining(trParams, trainer, network, miniBatchSource, epoch, progress, device);
                        //check if training process ends
                        if (epoch >= trParams.Epochs)
                        {
                            //save training checkpoint state
                            if (!string.IsNullOrEmpty(modelCheckPoint))
                            {
                                trainer.SaveCheckpoint(modelCheckPoint);
                            }

                            //save training history
                            if (!string.IsNullOrEmpty(historyPath))
                            {
                                string header = $"{trainer.LossFunction().Name};{trainer.EvaluationFunction().Name};";
                                saveTrainingHistory(m_trainingHistory, header, historyPath);
                            }

                            //save the best or last trained model and send a final progress report before the trainer completes
                            var bestModelPath = saveBestModel(trParams, trainer.Model(), epoch, isMinimize);
                            //
                            if (progress != null)
                            {
                                progress(prData);
                            }
                            //
                            trainResult.Iteration           = epoch;
                            trainResult.ProcessState        = ProcessState.Compleated;
                            trainResult.BestModelFile       = bestModelPath;
                            trainResult.TrainingHistoryFile = historyFile;
                            break;
                        }
                        else
                        {
                            epoch++;
                        }
                    }
                    //stop if the user requested cancellation
                    if (token.IsCancellationRequested)
                    {
                        if (!string.IsNullOrEmpty(modelCheckPoint))
                        {
                            trainer.SaveCheckpoint(modelCheckPoint);
                        }

                        //save training history
                        if (!string.IsNullOrEmpty(historyPath))
                        {
                            string header = $"{trainer.LossFunction().Name};{trainer.EvaluationFunction().Name};";
                            saveTrainingHistory(m_trainingHistory, header, historyPath);
                        }

                        //stopping may happen before the first epoch has completed, so build an interim progress report if needed
                        if (prData == null)//check the progress of the training process
                        {
                            prData = progressTraining(trParams, trainer, network, miniBatchSource, epoch, progress, device);
                        }

                        //save the best or last trained model and send a final progress report before the trainer terminates
                        var bestModelPath = saveBestModel(trParams, trainer.Model(), epoch, isMinimize);
                        //
                        if (progress != null)
                        {
                            progress(prData);
                        }

                        //setup training result
                        trainResult.Iteration           = prData.EpochCurrent;
                        trainResult.ProcessState        = ProcessState.Stopped;
                        trainResult.BestModelFile       = bestModelPath;
                        trainResult.TrainingHistoryFile = historyFile;
                        break;
                    }
                }

                return(trainResult);
            }
            catch (Exception ex)
            {
                var ee = ex;
                throw;
            }
            finally
            {
            }
        }
		void BindMonitor (SvnClientArgs args, IProgressMonitor monitor)
		{
			NotifData data = new NotifData ();
			progressData = new ProgressData ();

			args.Notify += delegate (object o, SvnNotifyEventArgs e) {
				Notify (e, data, monitor);
			};
			args.SvnError += delegate (object o, SvnErrorEventArgs a) {
				monitor.ReportError (a.Exception.Message, a.Exception.RootCause);
			};

			updateMonitor = monitor;
		}
Example #42
		IntPtr TryStartOperation (IProgressMonitor monitor)
		{
			lock (sync) {
				if (inProgress) {
					LoggingService.LogError ("Old: {0}", oldStacktrace);
					LoggingService.LogError ("Current: {0}", Environment.StackTrace);
					throw new SubversionException ("Another Subversion operation is already in progress.");
				}
				oldStacktrace = Environment.StackTrace;
				inProgress = true;
				updatemonitor = monitor;
				progressData = new ProgressData ();
				return newpool (pool);
			}
		}
            internal UploadBitsState(WebRequest request, Stream readStream, byte [] buffer, byte [] header, byte [] footer, CompletionDelegate completionDelegate, AsyncOperation asyncOp, ProgressData progress, WebClient webClient) {
                InnerBuffer = buffer;
                Header = header;
                Footer = footer;
                ReadStream = readStream;
                Request = request;
                AsyncOp = asyncOp;
                CompletionDelegate = completionDelegate;

                if (AsyncOp != null)
                {
                    Progress = progress;
                    Progress.HasUploadPhase = true;
                    Progress.TotalBytesToSend = request.ContentLength < 0 ? -1 : request.ContentLength;
                }

                WebClient = webClient;
            }
Example #44
		private static MeshGroup ReadObject(XmlReader xmlTree, double scale, ProgressData progressData)
		{
			MeshGroup meshGroup = new MeshGroup();
			while (xmlTree.Read())
			{
				if (xmlTree.Name == "mesh")
				{
					using (XmlReader meshTree = xmlTree.ReadSubtree())
					{
						ReadMesh(meshTree, meshGroup, scale, progressData);
						if (progressData.LoadCanceled)
						{
							return null;
						}
					}
				}
			}

			return meshGroup;
		}
Example #45
        private static async Task <List <ClusterNode> > BuildClustersAsync(ICollection <Node> nodes, IDistanceMetric distanceMetric, ProgressData progressData)
        {
            var nodeCount = nodes
                            .SelectMany(node => node.NeighborsByDistance.Select(neighbor => neighbor.Node.FirstLeaf.Index))
                            .Concat(nodes.Select(node => node.FirstLeaf.Index))
                            .Distinct().Count();

            progressData.Reset($"Building clusters for {nodeCount} matches...", nodes.Count - 1);

            await Task.Run(async() =>
            {
                // Collect isolated nodes off to the side as we find them
                var isolatedNodes = new List <Node>();

                while (nodes.Count > 1)
                {
                    // This is a little verbose, but optimized for performance -- O(N) overall.
                    Node secondNode       = null;
                    var neighborToCluster = new Neighbor {
                        DistanceSquared = double.MaxValue
                    };
                    foreach (var node in nodes)
                    {
                        if (node.FirstLeaf.NeighborsByDistance.Count > 0 && node.FirstLeaf.NeighborsByDistance.First().DistanceSquared < neighborToCluster.DistanceSquared)
                        {
                            secondNode        = node;
                            neighborToCluster = node.FirstLeaf.NeighborsByDistance.First();
                        }
                        if (node.FirstLeaf != node.SecondLeaf && node.SecondLeaf.NeighborsByDistance.Count > 0 && node.SecondLeaf.NeighborsByDistance.First().DistanceSquared < neighborToCluster.DistanceSquared)
                        {
                            secondNode        = node;
                            neighborToCluster = node.SecondLeaf.NeighborsByDistance.First();
                        }
                    }

                    var foundNodesToCluster = secondNode != null;

                    ClusterNode clusterNode;
                    if (!foundNodesToCluster)
                    {
                        // Some of the nodes might have no neighbors because they are fully isolated.
                        // In other words, none of the leaf nodes in the cluster has any shared matches outside of the cluster.
                        // This might happen for a very distant cluster with no sharing in closer relatives,
                        // or for example a split between maternal and paternal relatives.
                        var isIsolatedNodes = nodes.ToLookup(node =>
                        {
                            var leafNodeIndexes = node.GetOrderedLeafNodesIndexes();
                            return(node.GetOrderedLeafNodes().All(leafNode => leafNodeIndexes.IsSupersetOf(leafNode.Coords.Keys)));
                        });
                        var newIsolatedNodes = isIsolatedNodes[true].ToList();
                        if (newIsolatedNodes.Count > 0)
                        {
                            // Segregate the isolated nodes, since there is nothing that will make them un-isolated.
                            isolatedNodes.AddRange(newIsolatedNodes);
                            nodes = isIsolatedNodes[false].ToList();

                            // If there are fewer than 2 nodes remaining after segregating the isolated nodes, we're done.
                            if (nodes.Count <= 1)
                            {
                                break;
                            }
                        }

                        // All of the remaining nodes have at least one shared match in some other cluster.
                        // Make a larger cluster by joining the smallest cluster with the other node that has the greatest overlap with it.
                        var smallestNode          = nodes.OrderBy(node => node.NumChildren).First();
                        var smallestNodeLeafNodes = new HashSet <int>(smallestNode.GetOrderedLeafNodesIndexes());
                        var otherNode             = nodes
                                                    .Where(node => node != smallestNode)
                                                    .OrderByDescending(node => smallestNodeLeafNodes.Intersect(node.GetOrderedLeafNodesIndexes()).Count())
                                                    .ThenBy(node => node.NumChildren)
                                                    .First();
                        clusterNode = new ClusterNode(otherNode, smallestNode, double.PositiveInfinity, distanceMetric);
                    }
                    else
                    {
                        var firstNode = neighborToCluster.Node;
                        var first     = firstNode.GetHighestParent();
                        var second    = secondNode.GetHighestParent();
                        clusterNode   = new ClusterNode(first, second, neighborToCluster.DistanceSquared, distanceMetric);
                    }

                    var nodesToRemove = GetNodesToRemove(clusterNode);

                    var nodesWithRemovedNeighbors = new HashSet <LeafNode>(await RemoveNodesAsync(nodes, nodesToRemove.ToList()));

                    nodes.Remove(clusterNode.First);
                    nodes.Remove(clusterNode.Second);

                    // The first and last leaf nodes in the new cluster cannot have each other as neighbors.
                    if (clusterNode.FirstLeaf.NeighborsByDistance.RemoveAll(neighbor => clusterNode.SecondLeaf == neighbor.Node) > 0)
                    {
                        nodesWithRemovedNeighbors.Add(clusterNode.FirstLeaf);
                    }
                    if (clusterNode.SecondLeaf.NeighborsByDistance.RemoveAll(neighbor => clusterNode.FirstLeaf == neighbor.Node) > 0)
                    {
                        nodesWithRemovedNeighbors.Add(clusterNode.SecondLeaf);
                    }

                    await RecalculateNeighborsAsync(nodes, nodesWithRemovedNeighbors, distanceMetric);

                    nodes.Add(clusterNode);

                    progressData.Increment();
                }

                // If any isolated nodes were found, add them to the end in order of decreasing size.
                if (isolatedNodes.Count > 0)
                {
                    var nodesLargestFirst = isolatedNodes.OrderByDescending(n => n.NumChildren).ToList();
                    var node = nodesLargestFirst.First();
                    foreach (var otherNode in nodesLargestFirst.Skip(1))
                    {
                        node = new ClusterNode(node, otherNode, double.PositiveInfinity, distanceMetric);
                    }

                    if (nodes.Count > 0)
                    {
                        node = new ClusterNode(nodes.Last(), node, double.PositiveInfinity, distanceMetric);
                        nodes.Remove(nodes.Last());
                    }
                    nodes.Add(node);
                }
            });

            progressData.Reset("Done");

            return(nodes.OfType <ClusterNode>().ToList());
        }
Example #46
		private static void ReadMesh(XmlReader xmlTree, MeshGroup meshGroup, double scale, ProgressData progressData)
		{
			List<Vector3> vertices = new List<Vector3>();
			while (xmlTree.Read())
			{
				switch (xmlTree.Name)
				{
					case "vertices":
						using (XmlReader verticesTree = xmlTree.ReadSubtree())
						{
							ReadVertices(verticesTree, vertices, scale, progressData);
							if (progressData.LoadCanceled)
							{
								return;
							}
						}
						break;

					case "volume":
						string materialId = xmlTree["materialid"];
						Mesh loadedMesh = null;
						using (XmlReader volumeTree = xmlTree.ReadSubtree())
						{
							loadedMesh = ReadVolume(volumeTree, vertices, progressData);
							if (progressData.LoadCanceled)
							{
								return;
							}
							meshGroup.Meshes.Add(loadedMesh);
						}
						if (loadedMesh != null && materialId != null)
						{
							MeshMaterialData material = MeshMaterialData.Get(loadedMesh);
							material.MaterialIndex = int.Parse(materialId);
						}
						break;
				}
			}
		}
Example #47
        private static async Task CalculateNeighborsAsync(List <LeafNode> leafNodesAll, List <LeafNode> leafNodesToRecalculate, IDistanceMetric distanceMetric, ProgressData progressData)
        {
            var buckets = leafNodesAll
                          .SelectMany(leafNode => distanceMetric.SignificantCoordinates(leafNode.Coords).Select(coord => new { Coord = coord, LeafNode = leafNode }))
                          .GroupBy(pair => pair.Coord, pair => pair.LeafNode)
                          .ToDictionary(g => g.Key, g => g.ToList());

            var calculateNeighborsByDistanceTasks = leafNodesToRecalculate.Select(async leafNode =>
            {
                leafNode.NeighborsByDistance = await Task.Run(() => GetNeighborsByDistance(leafNode, buckets, distanceMetric));
                progressData?.Increment();
            });

            await Task.WhenAll(calculateNeighborsByDistanceTasks);
        }
Example #48
		private static Mesh ReadVolume(XmlReader xmlTree, List<Vector3> vertices, ProgressData progressData)
		{
			Mesh newMesh = new Mesh();
			while (xmlTree.Read())
			{
				if (xmlTree.Name == "triangle")
				{
					using (XmlReader triangleTree = xmlTree.ReadSubtree())
					{
						while (triangleTree.Read())
						{
							int[] indices = new int[3];
							while (triangleTree.Read())
							{
								switch (triangleTree.Name)
								{
									case "v1":
										string v1 = triangleTree.ReadString();
										indices[0] = int.Parse(v1);
										break;

									case "v2":
										string v2 = triangleTree.ReadString();
										indices[1] = int.Parse(v2);
										break;

									case "v3":
										string v3 = triangleTree.ReadString();
										indices[2] = int.Parse(v3);
										break;

									case "map":
										using (XmlReader mapTree = triangleTree.ReadSubtree())
										{
										}
										// a texture map, has u1...un and v1...vn
										break;

									default:
										break;
								}
							}
							if (indices[0] != indices[1]
								&& indices[0] != indices[2]
								&& indices[1] != indices[2]
								&& vertices[indices[0]] != vertices[indices[1]]
								&& vertices[indices[1]] != vertices[indices[2]]
								&& vertices[indices[2]] != vertices[indices[0]])
							{
								Vertex[] triangle = new Vertex[]
                                {
                                    newMesh.CreateVertex(vertices[indices[0]], CreateOption.CreateNew, SortOption.WillSortLater),
                                    newMesh.CreateVertex(vertices[indices[1]], CreateOption.CreateNew, SortOption.WillSortLater),
                                    newMesh.CreateVertex(vertices[indices[2]], CreateOption.CreateNew, SortOption.WillSortLater),
                                };
								newMesh.CreateFace(triangle, CreateOption.CreateNew);
							}

							bool continueProcessing;
							progressData.ReportProgress0To50(out continueProcessing);
							if (!continueProcessing)
							{
								// this is what we should do but it requires a bit more debugging.
								return null;
							}
						}
					}
				}
			}
			return newMesh;
		}
Example #49
 public void GoToGameOver()
 {
     ProgressData.LoadFromDisk(true);
     SceneManager.LoadScene(this.Destination);
 }
Example #50
		private static void ReadVertices(XmlReader xmlTree, List<Vector3> vertices, double scale, ProgressData progressData)
		{
			while (xmlTree.Read())
			{
				if (xmlTree.Name == "vertices")
				{
					using (XmlReader verticesTree = xmlTree.ReadSubtree())
					{
						while (verticesTree.Read())
						{
							if (xmlTree.Name == "vertex")
							{
								using (XmlReader vertexTree = verticesTree.ReadSubtree())
								{
									while (vertexTree.Read())
									{
										if (vertexTree.Name == "coordinates")
										{
											using (XmlReader coordinatesTree = vertexTree.ReadSubtree())
											{
												Vector3 position = new Vector3();
												while (coordinatesTree.Read())
												{
													switch (coordinatesTree.Name)
													{
														case "x":
															string x = coordinatesTree.ReadString();
															position.x = double.Parse(x);
															break;

														case "y":
															string y = coordinatesTree.ReadString();
															position.y = double.Parse(y);
															break;

														case "z":
															string z = coordinatesTree.ReadString();
															position.z = double.Parse(z);
															break;

														default:
															break;
													}
												}
												position *= scale;
												vertices.Add(position);
											}
											bool continueProcessing;
											progressData.ReportProgress0To50(out continueProcessing);
										}
									}
								}
							}
						}
					}
				}
			}
		}
Example #51
        private void trainingProgress(ProgressData progress)
        {
            try
            {
                //
                Application.Current.Dispatcher.BeginInvoke(
                    DispatcherPriority.Background,
                    new Action(

                        () =>
                {
                    TrainingProgress.Iteration = $"{progress.EpochCurrent} of {progress.EpochTotal}";

                    //TrainingProgress.TestEvaluation = $"{progress.ValidationEval.ToString("0.000")}";
                    //TrainingProgress.EvaluationFunction = $"'{progress.EvaluationFunName}' evaluation";


                    if (!double.IsNaN(progress.MinibatchAverageLoss) && !double.IsInfinity(progress.MinibatchAverageLoss))
                    {
                        TrainingProgress.MBLossValue.Add(new PointPair(progress.EpochCurrent, progress.MinibatchAverageLoss));
                        TrainingProgress.TrainingLoss = $"{progress.MinibatchAverageLoss.ToString("0.000")}";
                    }
                    else
                    {
                        TrainingProgress.TrainingLoss = $"Infinity";
                    }



                    if (!double.IsNaN(progress.MinibatchAverageEval) && !double.IsInfinity(progress.MinibatchAverageEval))
                    {
                        TrainingProgress.MBEvaluationValue.Add(new PointPair(progress.EpochCurrent, progress.MinibatchAverageEval));
                    }


                    if (!double.IsNaN(progress.TrainEval) && !double.IsInfinity(progress.TrainEval))
                    {
                        TrainingProgress.TrainEvalValue.Add(new PointPair(progress.EpochCurrent, progress.TrainEval));
                    }
                    else
                    {
                        TrainingProgress.TrainEvalValue.Add(new PointPair(progress.EpochCurrent, double.NaN));
                    }

                    if (!double.IsNaN(progress.ValidationEval) && !double.IsInfinity(progress.ValidationEval))
                    {
                        TrainingProgress.ValidationEvalValue.Add(new PointPair(progress.EpochCurrent, progress.ValidationEval));
                    }
                    else
                    {
                        TrainingProgress.ValidationEvalValue.Add(new PointPair(progress.EpochCurrent, double.NaN));
                    }


                    //update Graphs
                    if (UpdateTrainingtGraphs != null)
                    {
                        UpdateTrainingtGraphs(progress.EpochCurrent, progress.MinibatchAverageLoss, progress.MinibatchAverageEval, progress.TrainEval, progress.ValidationEval);
                    }
                    //set status message
                    var appCnt = anndotnet.wnd.App.Current.MainWindow.DataContext as AppController;
                    if (progress.EpochCurrent == 0)
                    {
                        appCnt.StatusMessage = $"Training process is about to start. Please wait!";
                    }
                    else
                    {
                        appCnt.StatusMessage = $"Iteration:{progress.EpochCurrent} of {progress.EpochTotal} processed!";
                    }
                }

                        ));
            }
            catch (Exception ex)
            {
                var appCnt = anndotnet.wnd.App.Current.MainWindow.DataContext as AppController;
                appCnt.ReportException(ex);
            }
        }
Example #52
        public void WriteFile(System.IO.Stream file, FileOrDirectory src)
        {
            FtpClient ftp = null;
            try {
                if (file == null) return;

                ftp = FtpConnections.Open(IsSource, ref url);

                var path = Url.Path() + "/" + src.Name;
                var start = DateTime.Now;

                if (!UseFXP) {
                    if (ftp.FileTransferType != TransferType.Binary) ftp.FileTransferType = TransferType.Binary;
                    if (TransferProgress) {
                        progress[ftp] = new ProgressData { ElapsedTime = new TimeSpan(0), Path = path, Size = src.Size };
                        ftp.TransferProgress += ShowProgress;
                    }
                    ftp.PutFile(file, src.Name, FileAction.Create);
                } else { // use FXP for direct server to server transfer.
                    var srcftp = (FtpStream)file;
                    srcftp.Client.FxpCopy(src.Name, ftp);
                }
                ftp.SetDateTime(src.Name, src.ChangedUtc);

                Log.Upload(path, src.Size, DateTime.Now - start);
            } catch (Exception e) {
                Sync.Failure(src, e, ftp);
            } finally {
                if (ftp != null) {
                    if (TransferProgress) {
                        ftp.TransferProgress -= ShowProgress;
                        progress.Remove(ftp);
                    }
                    FtpConnections.Pass(ftp);
                }
            }
        }
Exemple #53
0
		private void bw_DoWork(object sender, DoWorkEventArgs e)
		{

			BulkCopyArgs args = e.Argument as BulkCopyArgs;
			if (args == null)
				return;

			BulkCopyResult result = new BulkCopyResult();


			ProgressData pd = new ProgressData();
			pd.Total = args.Objects.Count;
			SqlTransaction tr = null;
			string copyErrors = String.Empty;
			bool copyCancelled = false;
			SqlConnection sourceConn = null;
			SqlConnection destConn = null;

			try
			{
				sourceConn = _cp.CreateSqlConnection(true, false);
				destConn = _cpDest.CreateSqlConnection(true, false);
				try
				{
					if (args.StopOnError)
						tr = destConn.BeginTransaction();


					try
					{
						if (rbEmpty.Checked)
						{
							DeleteData(destConn, tr, args.Objects);
						}
						else if (rbEmptyAll.Checked)
						{
							DeleteAllData(destConn, tr);						
						}
					}
					catch (Exception ex)
					{
						if (args.StopOnError)
							tr.Rollback();

						result.Errors += SmoHelpers.FormatExceptionMsg(ex);
						return;
					}

					foreach (DbObjectList.DbObjectInfo selObj in args.Objects)
					{
						try
						{
							pd.Info = selObj.Name;
							pd.Progress++;

							if (_cancelled || bw.CancellationPending)
							{
								e.Cancel = true;
								if (args.StopOnError)
									tr.Rollback();

								break;
							}
							else
							{
								bw.ReportProgress(0, pd);
								if (!LoadDataAndCopy(selObj.SchemaAndName, args.CopyOptions, sourceConn, destConn, tr, out copyErrors, out copyCancelled))
								{
									if (args.StopOnError)
										tr.Rollback();

									if (copyCancelled)
									{
										e.Cancel = true;
										break;
									}


									result.Errors += copyErrors;
									if (args.StopOnError)
										break;
								}
							}
						}
						catch (Exception ex)
						{
							if (args.StopOnError)
							{
								tr.Rollback();
								result.Errors += SmoHelpers.FormatExceptionMsg(ex, "Table/View: " + selObj.Name);
								break;
							}
							else
							{
								result.Errors += SmoHelpers.FormatExceptionMsg(ex, "Table/View: " + selObj.Name);
							}
						}
					}

					if (args.StopOnError && tr.Connection != null)
						tr.Commit();
				}
				catch (Exception ex)
				{
					result.Errors += SmoHelpers.FormatExceptionMsg(ex);
				}
			}
			finally
			{
				e.Result = result;

				if (sourceConn != null)
				{
					if (sourceConn.State != ConnectionState.Closed)
						sourceConn.Close();
					sourceConn.Dispose();
				}

				if (destConn != null)
				{
					if (destConn.State != ConnectionState.Closed)
						destConn.Close();
					destConn.Dispose();
				}
			}
		}
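The bw_DoWork method above posts its ProgressData through bw.ReportProgress; a minimal sketch of the matching ProgressChanged handler is shown below. The handler name and the progressBar/statusLabel controls are assumptions, while the ProgressData members (Total, Progress, Info) are the ones populated in bw_DoWork.
		private void bw_ProgressChanged(object sender, ProgressChangedEventArgs e)
		{
			// the ProgressData instance posted via bw.ReportProgress(0, pd) above
			ProgressData pd = e.UserState as ProgressData;
			if (pd == null || pd.Total == 0)
				return;

			// show overall percentage and the name of the object currently being copied
			progressBar.Value = Math.Min(100, (pd.Progress * 100) / pd.Total);
			statusLabel.Text = pd.Info;
		}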
        /// <summary>
        /// Sets the progress value.
        /// </summary>
        private void SetProgress()
        {
            ProgressData data = GetProgressValue();
            int progress = data.progress;

            transactionTime += updateTimer.Interval;

            if (0 < progress)
            {
                double remaining = (data.maxProgress - progress) / (double) progress;
                TimeSpan restTransactionTime = TimeSpan.FromMilliseconds(transactionTime * remaining);
                string restTransValue = GetTimeSpanToFormat(restTransactionTime);

                this.Text = string.Format(Properties.Resources.RestTimeTitle, restTransValue);
                restTimeLabel.Text = restTransValue;
            }

            if (preProgressData.Equals(data) == false)
            {
                progressBar.Value = (progress * 100) / data.maxProgress;

                targetFolderLabel.Text = string.Format(Properties.Resources.FolderInfoMessage, data.maxProgress);
                targetLabel.Text = data.targetFolderName;
                currentLabel.Text = data.currentFileName;

                if (progress == data.maxProgress)
                {
                    this.Close();
                }

                preProgressData = data;
            }
        }
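For reference, the remaining-time estimate used inside SetProgress can be isolated as below; this helper is illustrative only and is not part of the original code.
        // If 'elapsedMs' milliseconds of work produced 'progress' of 'maxProgress' units,
        // the rest is expected to take elapsedMs * (maxProgress - progress) / progress.
        private static TimeSpan EstimateRemainingTime(double elapsedMs, int progress, int maxProgress)
        {
            if (progress <= 0)
            {
                return TimeSpan.Zero; // no throughput information yet
            }

            double remainingRatio = (maxProgress - progress) / (double)progress;
            return TimeSpan.FromMilliseconds(elapsedMs * remainingRatio);
        }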
 public void Dispose()
 {
     progress.Report(ProgressData.FinishedSuccessfully());
 }
Exemple #56
0
 public void UpdateSlots(ProgressData.SlotWrapper[] slots)
 {
     updateSlotParadigm(slots);
 }
Exemple #57
0
        public async Task DownloadSubtitleAsync(string movieRootDir, SubtitleData subtitleData, ProgressData progress = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            try
            {
                var allSubtitles = new List <SubtitleDataDetail>();

                if (subtitleData.Subtitles != null)
                {
                    allSubtitles.AddRange(subtitleData.Subtitles);
                }

                Utils.CreateSubtitleDataDirectoris(movieRootDir, subtitleData);

                movieRootDir = Path.Combine(movieRootDir, "Subtitles");

                int total        = allSubtitles.Count;
                int currentIndex = 1;

                if (subtitleData.Subtitles != null)
                {
                    foreach (var sub in subtitleData.Subtitles)
                    {
                        if (cancellationToken.IsCancellationRequested)
                        {
                            throw new TaskCanceledException("Operation canceled by user.");
                        }
                        progress?.Report(currentIndex, total);
                        currentIndex++;

                        string fileName = Utils.RenameToPhisicalName($"{sub.Title}-{sub.Id}.zip");
                        string fileDir  = Path.Combine(movieRootDir, "Fa");
                        if (sub.SeasonNumber.HasValue)
                        {
                            fileDir = Path.Combine(fileDir, $"Season {sub.SeasonNumber.Value}");
                        }

                        fileName = Path.Combine(fileDir, fileName);
                        await Utils.DownloadFileAsync(fileName, sub.Link);
                    }
                }
            }
            catch (TaskCanceledException)
            {
                // rethrow cancellation as-is, preserving the original stack trace
                throw;
            }
            catch (Exception)
            {
                // rethrow without resetting the stack trace ('throw ex' would)
                throw;
            }
        }
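A hypothetical caller for DownloadSubtitleAsync might look like the sketch below; the 'downloader' instance, the movie path, and the parameterless ProgressData constructor are assumptions, while the progress parameter mirrors the Report(current, total) call used inside the method.
            // sketch only: wire up cancellation and progress, then start the download
            var cts = new CancellationTokenSource();
            var progress = new ProgressData();   // assumed parameterless constructor
            try
            {
                await downloader.DownloadSubtitleAsync(@"D:\Movies\SomeMovie", subtitleData, progress, cts.Token);
            }
            catch (TaskCanceledException)
            {
                // reached when cts.Cancel() was called while subtitles were still downloading
            }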
Exemple #58
0
		public static List<MeshGroup> ParseFileContents(Stream amfStream, ReportProgressRatio reportProgress)
		{
			Stopwatch time = new Stopwatch();
			time.Start();

			Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;

			double parsingFileRatio = .5;

			if (amfStream == null)
			{
				return null;
			}

			List<MeshGroup> meshGroups = null;

			// do the loading
			try
			{
				using (Stream amfCompressedStream = GetCompressedStreamIfRequired(amfStream))
				{
					XmlReader xmlTree = XmlReader.Create(amfCompressedStream);
					while (xmlTree.Read())
					{
						if (xmlTree.Name == "amf")
						{
							break;
						}
					}
					double scale = GetScaling(xmlTree);

					ProgressData progressData = new ProgressData(amfStream, reportProgress);

					meshGroups = new List<MeshGroup>();

					while (xmlTree.Read())
					{
						if (xmlTree.Name == "object")
						{
							using (XmlReader objectTree = xmlTree.ReadSubtree())
							{
								meshGroups.Add(ReadObject(objectTree, scale, progressData));
								if (progressData.LoadCanceled)
								{
									return null;
								}
							}
						}
					}

					xmlTree.Dispose();
				}
			}
			catch (Exception e)
			{
				Debug.Print(e.Message);
				BreakInDebugger();
				return null;
			}

#if true
			// merge all the vertices that are in the same place together
			int totalMeshes = 0;
			foreach (MeshGroup meshGroup in meshGroups)
			{
				foreach (Mesh mesh in meshGroup.Meshes)
				{
					totalMeshes++;
				}
			}

			double currentMeshProgress = 0;
			double ratioLeftToUse = 1 - parsingFileRatio;
			double progressPerMesh = 1.0 / totalMeshes * ratioLeftToUse;
			foreach (MeshGroup meshGroup in meshGroups)
			{
				foreach (Mesh mesh in meshGroup.Meshes)
				{
					bool keepProcessing = true;
					mesh.CleanAndMergMesh(
						(double progress0To1, string processingState, out bool continueProcessing) =>
						{
							if (reportProgress != null)
							{
								double currentTotalProgress = parsingFileRatio + currentMeshProgress;
								reportProgress(currentTotalProgress + progress0To1 * progressPerMesh, processingState, out continueProcessing);
								keepProcessing = continueProcessing;
							}
							else
							{
								continueProcessing = true;
							}
						}
						);
					if (!keepProcessing)
					{
						amfStream.Close();
						return null;
					}
					currentMeshProgress += progressPerMesh;
				}
			}
#endif

			time.Stop();
			Debug.WriteLine(string.Format("AMF Load in {0:0.00}s", time.Elapsed.TotalSeconds));

			amfStream.Close();
			bool hasValidMesh = false;
			foreach (MeshGroup meshGroup in meshGroups)
			{
				foreach (Mesh mesh in meshGroup.Meshes)
				{
					if (mesh.Faces.Count > 0)
					{
						hasValidMesh = true;
					}
				}
			}
			if (hasValidMesh)
			{
				return meshGroups;
			}
			else
			{
				return null;
			}
		}
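A hypothetical caller for ParseFileContents is sketched below; the hosting class name (AmfDocument here) and the file path are assumptions, and the delegate shape (ratio, state, out continueProcessing) follows the reportProgress calls inside the method.
		// sketch only: load an AMF file and print parsing/merging progress to the console
		using (Stream amfStream = File.OpenRead("part.amf"))
		{
			List<MeshGroup> meshGroups = AmfDocument.ParseFileContents(
				amfStream,
				(double ratio0To1, string processingState, out bool continueProcessing) =>
				{
					Console.WriteLine("{0}: {1:P0}", processingState, ratio0To1);
					continueProcessing = true; // set to false to cancel the load
				});

			if (meshGroups == null)
			{
				// the load failed or was canceled
			}
		}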
Exemple #59
0
        /// <summary>
        /// Callback from the training loop, used to inform the user about training progress.
        /// </summary>
        /// <param name="trParams"></param>
        /// <param name="trainer"></param>
        /// <param name="network"></param>
        /// <param name="mbs"></param>
        /// <param name="epoch"></param>
        /// <param name="progress"></param>
        /// <param name="device"></param>
        /// <returns></returns>
        protected virtual ProgressData progressTraining(TrainingParameters trParams, Trainer trainer,
                                                        Function network, MinibatchSourceEx mbs, int epoch, TrainingProgress progress, DeviceDescriptor device)
        {
            //calculate average training loss and evaluation
            var mbAvgLoss = trainer.PreviousMinibatchLossAverage();
            var mbAvgEval = trainer.PreviousMinibatchEvaluationAverage();
            var vars      = InputVariables.Union(OutputVariables).ToList();
            //initialize training evaluation with the minibatch average; it may be replaced by a full-dataset evaluation below
            double trainEval = mbAvgEval;

            //when the dataset is huge, evaluating the model against the full training dataset can take a long time,
            // so it can be skipped via the 'FullTrainingSetEval' parameter
            if (trParams.FullTrainingSetEval)
            {
                if (m_TrainData == null || m_TrainData.Values.Any(x => x.data.IsValid == false))
                {
                    using (var streamDatat = MinibatchSourceEx.GetFullBatch(mbs.Type, mbs.TrainingDataFile, mbs.StreamConfigurations, device))
                    {
                        //get full training dataset
                        m_TrainData = MinibatchSourceEx.ToMinibatchData(streamDatat, vars, mbs.Type);
                    }
                    //perform evaluation of the current model on whole training dataset
                    trainEval = trainer.TestMinibatch(m_TrainData, device);
                }
            }

            string bestModelPath = m_bestModelPath;
            double validEval     = 0;

            //if the validation dataset is empty, skip the test-minibatch evaluation
            if (!string.IsNullOrEmpty(mbs.ValidationDataFile))
            {
                if (m_ValidationData == null || m_ValidationData.Values.Any(x => x.data.IsValid == false))
                {
                    //get validation dataset
                    using (var streamData = MinibatchSourceEx.GetFullBatch(mbs.Type, mbs.ValidationDataFile, mbs.StreamConfigurations, device))
                    {
                        //store validation data for future testing
                        m_ValidationData = MinibatchSourceEx.ToMinibatchData(streamData, vars, mbs.Type);
                    }
                }
                //perform evaluation of the current model with validation dataset
                validEval = trainer.TestMinibatch(m_ValidationData, device);
            }

            //decide whether the current model is worth saving to the temp location;
            // depending on the evaluation function, a greater value can be better than the previous one (e.g. ClassificationAccuracy)
            if (isBetterThanPrevious(trainEval, validEval, StatMetrics.IsGoalToMinimize(trainer.EvaluationFunction())) && trParams.SaveModelWhileTraining)
            {
                //save model
                var strFilePath = $"{trParams.ModelTempLocation}\\model_at_{epoch}of{trParams.Epochs}_epochs_TimeSpan_{DateTime.Now.Ticks}";
                if (!Directory.Exists(trParams.ModelTempLocation))
                {
                    Directory.CreateDirectory(trParams.ModelTempLocation);
                }

                //save temp model
                network.Save(strFilePath);

                //store the training and validation evaluation as the reference state for the next comparison
                m_PrevTrainingEval   = trainEval;
                m_PrevValidationEval = validEval;
                bestModelPath        = strFilePath;

                var tpl = Tuple.Create <double, double, string>(trainEval, validEval, strFilePath);
                m_ModelEvaluations.Add(tpl);
            }


            m_bestModelPath = bestModelPath;

            //create progressData object
            var prData = new ProgressData();

            prData.EpochTotal           = trParams.Epochs;
            prData.EpochCurrent         = epoch;
            prData.EvaluationFunName    = trainer.EvaluationFunction().Name;
            prData.TrainEval            = trainEval;
            prData.ValidationEval       = validEval;
            prData.MinibatchAverageEval = mbAvgEval;
            prData.MinibatchAverageLoss = mbAvgLoss;

            //progress is reported only when the following condition is satisfied
            if (progress != null && (epoch % trParams.ProgressFrequency == 0 || epoch == 1 || epoch == trParams.Epochs))
            {
                //add info to the history
                m_trainingHistory.Add(new Tuple <int, float, float, float, float>(epoch, (float)mbAvgLoss, (float)mbAvgEval,
                                                                                  (float)trainEval, (float)validEval));

                //send progress
                progress(prData);
                //
                //Console.WriteLine($"Epoch={epoch} of {trParams.Epochs} processed.");
            }

            //return progress data
            return prData;
        }
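A minimal console-based 'progress' callback for progressTraining could look like the sketch below; it only reads ProgressData members assigned above and is not part of the original source.
        private static void ReportToConsole(ProgressData prData)
        {
            // print the same per-epoch values that the UI example above plots
            Console.WriteLine(
                $"Epoch {prData.EpochCurrent}/{prData.EpochTotal}  " +
                $"mbLoss={prData.MinibatchAverageLoss:0.000}  mbEval={prData.MinibatchAverageEval:0.000}  " +
                $"trainEval={prData.TrainEval:0.000}  validEval={prData.ValidationEval:0.000}");
        }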
 private void InitWebClientAsync()
 {
     if (!this.m_InitWebClientAsync)
     {
         this.openReadOperationCompleted = new SendOrPostCallback(this.OpenReadOperationCompleted);
         this.openWriteOperationCompleted = new SendOrPostCallback(this.OpenWriteOperationCompleted);
         this.downloadStringOperationCompleted = new SendOrPostCallback(this.DownloadStringOperationCompleted);
         this.downloadDataOperationCompleted = new SendOrPostCallback(this.DownloadDataOperationCompleted);
         this.downloadFileOperationCompleted = new SendOrPostCallback(this.DownloadFileOperationCompleted);
         this.uploadStringOperationCompleted = new SendOrPostCallback(this.UploadStringOperationCompleted);
         this.uploadDataOperationCompleted = new SendOrPostCallback(this.UploadDataOperationCompleted);
         this.uploadFileOperationCompleted = new SendOrPostCallback(this.UploadFileOperationCompleted);
         this.uploadValuesOperationCompleted = new SendOrPostCallback(this.UploadValuesOperationCompleted);
         this.reportDownloadProgressChanged = new SendOrPostCallback(this.ReportDownloadProgressChanged);
         this.reportUploadProgressChanged = new SendOrPostCallback(this.ReportUploadProgressChanged);
         this.m_Progress = new ProgressData();
         this.m_InitWebClientAsync = true;
     }
 }