/// <summary>
/// Downloads tag data for the current project on a background worker and writes the
/// result to the output location given in <paramref name="param"/>. When
/// <c>LoopDownload</c> is set, the [start, end] range is split into <c>LoopDays</c>-wide
/// segments that are downloaded and written out one at a time.
/// </summary>
/// <param name="param">Output file location (passed through to <c>OutputFile</c>).</param>
void Download(object param)
{
    // (segment start -> segment end) date ranges to download.
    Dictionary<DateTime, DateTime> dateset = new Dictionary<DateTime, DateTime>();

    if (param.ToString() == "")
    {
        MessageBox.Show("Specify an output location before downloading");
        return;
    }

    if (LoopDownload)
    {
        // Split [start, end] into LoopDays-wide windows. On the first pass only the
        // window end advances, so the first window begins at the original start date.
        // NOTE(review): assumes LoopDays > 0 — a zero/negative value would loop forever.
        DateTime workstart = start;
        DateTime workend = start;
        DateTime originalEnd = end;
        while (workend < originalEnd)
        {
            if (workend != start)
            {
                workstart = workstart.AddDays(LoopDays);
            }
            workend = workend.AddDays(LoopDays);
            dateset.Add(workstart, workend);
        }
    }
    else
    {
        dateset.Add(start, end);
    }

    _isDownloading = true;
    worker = new BackgroundWorker();
    worker.WorkerReportsProgress = true;
    worker.WorkerSupportsCancellation = true;

    worker.ProgressChanged += delegate(object sender, ProgressChangedEventArgs e)
    {
        DownloadProgress = e.ProgressPercentage;
        Debug.WriteLine(DownloadProgress);
    };

    worker.RunWorkerCompleted += delegate(object sender, RunWorkerCompletedEventArgs e)
    {
        // Write the final output only if the download was not cancelled.
        if (e.Cancelled != true)
        {
            OutputFile(param);
        }
    };

    worker.DoWork += delegate(object sender, DoWorkEventArgs args)
    {
        PIDownloader pi = new PIDownloader(_dst);
        int tagdownloadcount = 0;

        // Reuse existing tag managers (merging in any project tags they are missing);
        // otherwise create a fresh one for this project's tags.
        TagManager tmReference = null;
        if (TagManagers.Count == 0)
        {
            tmReference = new TagManager(_project.tags);
            TagManagers.Add(tmReference);
        }
        else
        {
            foreach (TagManager tm in TagManagers)
            {
                tmReference = tm;
                foreach (Tag t in _project.tags)
                {
                    if (!tm.Tags.Contains(t))
                    {
                        tm.Tags.Add(t);
                    }
                }
            }
        }

        // Download the tag data for each date segment.
        foreach (KeyValuePair<DateTime, DateTime> dates in dateset)
        {
            if (DownloadIsCanceled)
            {
                worker.CancelAsync();
                DownloadedData.Clear();
                _isDownloading = false;
                DownloadIsCanceled = false;
                DownloadProgressText = string.Empty;
                worker.ReportProgress(0);
                break;
            }

            start = dates.Key;
            end = dates.Value;

            foreach (Tag tag in _project.tags)
            {
                bool runagain = true;
                int runcount = 0;
                while (runagain)
                {
                    try
                    {
                        if (Compressed)
                        {
                            tmReference[tag.TagName].StringData = pi.Download(tag.TagName, start, end, Compressed);
                            if (tag.StringData != null)
                            {
                                DownloadProgressText = tag.StringData.Count + " Records Downloaded: " + tag.TagName;
                            }
                        }
                        else
                        {
                            tmReference[tag.TagName].Data = pi.Download(tag.TagName, start, end);
                            if (tag.Data != null)
                            {
                                DownloadProgressText = tag.Data.Count + " Records Downloaded: " + tag.TagName;
                            }
                        }
                        runagain = false;
                    }
                    catch (Exception e)
                    {
                        if (e is OutOfMemoryException)
                        {
                            DownloadProgressText = "Error Downloading " + tag.TagName + " " + e.Message + " Canceling..";
                            worker.CancelAsync();
                            // FIX: stop retrying once cancellation has been requested;
                            // previously runagain stayed true and this loop spun forever.
                            runagain = false;
                        }
                        else
                        {
                            DownloadProgressText = "Error Downloading " + tag.TagName + " " + e.Message + " Attempt " + runcount;
                            runcount++;
                            // FIX: cap retries at 3 (matching the non-looping Download
                            // overload) so a persistently failing tag cannot retry forever.
                            runagain = runcount <= 3;
                        }
                    }
                }

                tagdownloadcount++;
                // FIX: plain numeric conversion instead of round-tripping through strings.
                int progress = (int)((double)tagdownloadcount / Project.tags.Count() * 100);
                Console.WriteLine(tag.TagName + " segment 4" + tag.TagNameElement(4) + " " + progress.ToString());
                worker.ReportProgress(progress, Project);
            } // foreach tag

            // Replace any previous segment's results and write this segment out.
            DownloadedData.Clear();
            DownloadedData.Add(CreateDateTimeDataTable(tmReference));
            OutputFile(param);
            tagdownloadcount = 0;
        } // foreach dateset

        _isDownloading = false;
    };

    worker.RunWorkerAsync();
}
/// <summary>
/// Downloads PI archive data over SQL for every selected configuration, one
/// TagManager per configuration, and collects the results into DownloadedData.
/// Duplicate timestamps within a tag are flagged with the -9999.0 sentinel.
/// </summary>
void Download()
{
    _isDownloading = true;
    worker = new BackgroundWorker();
    worker.WorkerReportsProgress = true;
    worker.WorkerSupportsCancellation = true;
    DownloadProgress = 0;

    worker.ProgressChanged += delegate(object sender, ProgressChangedEventArgs e)
    {
        DownloadProgress = e.ProgressPercentage;
    };

    worker.DoWork += delegate(object sender, DoWorkEventArgs args)
    {
        PIDownloader pi = new PIDownloader(DataSourceType.WindART_SQL);
        DownloadProgressText = "Initializing Connection....";

        foreach (string config in _configs.selectedItems)
        {
            DownloadProgress = 0;
            int tagdownloadcount = 0;
            // NOTE(review): assumes SelectedSite is at least 7 characters — confirm.
            string sitename = SelectedSite.Substring(0, 7);
            List<Tag> SelectedTags = SelectTags(sitename);

            TagManager tmReference = new TagManager(SelectedTags, sitename);
            TagManagers.Add(tmReference);

            foreach (Tag tag in SelectedTags)
            {
                if (tag == null)
                {
                    continue;
                }
                string tagname = tag.TagName;

                if (_downloadCancelled)
                {
                    tmReference = null;
                    worker.ReportProgress(0);
                    worker.CancelAsync();
                    _downloadCancelled = false;
                    break;
                }

                SortedDictionary<DateTime, double> TagData = new SortedDictionary<DateTime, double>();

                // Build the archive query. The data layer only accepts raw SQL text, so
                // the tag name is escaped to keep the statement well-formed (and to block
                // quote-based injection); dates use an invariant format instead of the
                // culture-dependent ToShortDateString(). Prefer a parameterized query if
                // GetData ever supports one.
                StringBuilder sql = new StringBuilder();
                sql.Append(@"select time, value from piserver.piarchive..picomp where tag='");
                sql.Append(tagname.Replace("'", "''")); // FIX: escape embedded quotes
                sql.Append("' and time between '");
                sql.Append(tag.StartDate.ToString("yyyy-MM-dd", System.Globalization.CultureInfo.InvariantCulture)); // FIX: invariant date
                sql.Append(" 00:00:00");
                sql.Append("' and '");
                sql.Append(tag.EndDate.ToString("yyyy-MM-dd", System.Globalization.CultureInfo.InvariantCulture)); // FIX: invariant date
                sql.Append(" 00:00:00");
                sql.Append("' and value is not null");

                DataTable pidata = data.GetData(sql.ToString());
                foreach (DataRow row in pidata.Rows)
                {
                    DateTime thisDate = (DateTime)row["time"];
                    if (!TagData.ContainsKey(thisDate))
                    {
                        // FIX: convert directly instead of round-tripping through a string.
                        TagData.Add(thisDate, Convert.ToDouble(row["value"]));
                    }
                    else
                    {
                        // Duplicate timestamp for this tag: flag with the sentinel value.
                        TagData[thisDate] = -9999.0;
                    }
                }

                tmReference[tag.TagName].Data = TagData;
                tagdownloadcount++;
                int progress = (int)((double)tagdownloadcount / SelectedTags.Count * 100);
                worker.ReportProgress(progress);
            }

            // FIX: tmReference is nulled when the download is cancelled mid-config;
            // skip the datatable build in that case instead of dereferencing null.
            if (tmReference != null)
            {
                DownloadedData.Add(CreateDateTimeDataTable(tmReference));
            }
        }

        // FIX: clear the busy flag when the background work actually finishes. It was
        // previously cleared right after RunWorkerAsync() returned, i.e. before the
        // download had even started.
        _isDownloading = false;
    };

    worker.RunWorkerAsync();
}
/// <summary>
/// Downloads data for every project tag over the [start, end] range on a background
/// worker, retrying each tag up to 3 times on failure, and collects the results into
/// a single DownloadedData datatable.
/// </summary>
void Download()
{
    _isDownloading = true;
    worker = new BackgroundWorker();
    worker.WorkerReportsProgress = true;
    worker.WorkerSupportsCancellation = true;

    worker.ProgressChanged += delegate(object sender, ProgressChangedEventArgs e)
    {
        DownloadProgress = e.ProgressPercentage;
    };

    worker.DoWork += delegate(object sender, DoWorkEventArgs args)
    {
        PIDownloader pi = new PIDownloader(_dst);
        int tagdownloadcount = 0;

        // Reuse existing tag managers (merging in any project tags they are missing);
        // otherwise create a fresh one for this project's tags.
        TagManager tmReference = null;
        if (TagManagers.Count == 0)
        {
            tmReference = new TagManager(_project.tags);
            TagManagers.Add(tmReference);
        }
        else
        {
            foreach (TagManager tm in TagManagers)
            {
                tmReference = tm;
                foreach (Tag t in _project.tags)
                {
                    if (!tm.Tags.Contains(t))
                    {
                        tm.Tags.Add(t);
                    }
                }
            }
        }

        // Download the data for each tag in the current project.
        foreach (Tag tag in _project.tags)
        {
            if (DownloadIsCanceled)
            {
                worker.CancelAsync();
                DownloadedData.Clear();
                _isDownloading = false;
                DownloadIsCanceled = false;
                DownloadProgressText = string.Empty;
                worker.ReportProgress(0);
                break;
            }

            bool runagain = true;
            int runcount = 0;
            while (runagain)
            {
                try
                {
                    tmReference[tag.TagName].Data = pi.Download(tag.TagName, start, end);
                    // FIX: guard against a null result before reading Count (matches the
                    // looping Download overload and avoids a NullReferenceException).
                    if (tag.Data != null)
                    {
                        DownloadProgressText = tag.Data.Count + " Records Downloaded: " + tag.TagName;
                    }
                    runagain = false;
                }
                catch (Exception e)
                {
                    Console.WriteLine(tag.TagName + " " + e.Message);
                    DownloadProgressText = "Error Downloading " + tag.TagName + " " + e.Message;
                    runcount++;
                    // Retry up to 3 times, then give up on this tag.
                    runagain = runcount <= 3;
                }
            }

            tagdownloadcount++;
            // FIX: plain numeric conversion instead of round-tripping through strings.
            int progress = (int)((double)tagdownloadcount / Project.tags.Count() * 100);
            Console.WriteLine(tag.TagName + " segment 4" + tag.TagNameElement(4) + " " + progress.ToString());
            worker.ReportProgress(progress, Project);
        }

        // Replace any previous results with this run's data.
        DownloadedData.Clear();
        DownloadedData.Add(CreateDateTimeDataTable(tmReference));
        tagdownloadcount = 0;
        _isDownloading = false;
    };

    worker.RunWorkerAsync();
}