/// <summary>
/// Deletes every temp file recorded on the context; paths that no longer
/// exist on disk are skipped silently.
/// </summary>
/// <param name="context">Export context holding the temp-file paths to remove.</param>
private void cleanContext(FetchFullDataSetContext context)
{
    foreach (var path in context.TempFiles)
    {
        if (!File.Exists(path))
        {
            continue;
        }

        File.Delete(path);
    }
}
/// <summary>
/// UI-thread continuation for the CSV merge task: restores the idle UI state,
/// removes the temp files recorded on the context, and reports the outcome.
/// </summary>
/// <param name="task">The completed merge task; its AsyncState is the FetchFullDataSetContext.</param>
private void mergeDataSetCompleted(Task task)
{
    toggleBusyState(false);
    FetchFullDataSetContext context = (FetchFullDataSetContext)task.AsyncState;

    // Always remove the temp files, even when the merge failed part-way through.
    cleanContext(context);

    // BUG FIX: the original announced "Export complete" unconditionally, even when the
    // merge task faulted or was cancelled, and never observed task.Exception. Report
    // failures the same way fetchEntireDataSetCompleted does.
    if (task.Status == TaskStatus.Faulted)
    {
        lblOperation.Text = "Export failed";
        MessageBox.Show($"Export has failed\r\n{task.Exception.Message}\r\n{task.Exception.InnerException?.Message}", Text, MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    if (task.Status == TaskStatus.Canceled)
    {
        lblOperation.Text = "Export cancelled";
        MessageBox.Show("Export was cancelled", Text, MessageBoxButtons.OK, MessageBoxIcon.Warning);
        return;
    }

    lblOperation.Text = "Export complete";
    MessageBox.Show("Processing completed", Text, MessageBoxButtons.OK);
}
/// <summary>
/// UI-thread continuation for the full data-set download: releases the server-side
/// scroll token, reports failures, optionally lets the user keep a partial download,
/// and starts the CSV merge step once a target file has been chosen.
/// </summary>
/// <param name="task">The completed fetch task; its AsyncState is the FetchFullDataSetContext.</param>
private void fetchEntireDataSetCompleted(Task task)
{
    toggleBusyState(false);
    lblExportProgress.Text = "Download complete";
    FetchFullDataSetContext context = (FetchFullDataSetContext)task.AsyncState;

    // Release the scroll token on the cluster, fire-and-forget.
    // BUG FIX: the original lambda let any connection failure surface as an unobserved
    // task exception; swallow it here instead — an un-cleared scroll simply times out
    // server-side, so this is best-effort only. (Also drops the unused local the
    // original assigned the response to.)
    Task.Factory.StartNew(() =>
    {
        try
        {
            var conf = new ConnectionConfiguration(new Uri(ClusterUrl));
            var client = new ElasticLowLevelClient(conf);
            client.ClearScroll<dynamic>(new { scroll_id = new string[] { context.ScrollToken } });
        }
        catch
        {
            // Best effort: failing to clear the scroll must not affect the export flow.
        }
    });

    bool process = task.Status == TaskStatus.RanToCompletion;
    if (task.Status == TaskStatus.Faulted)
    {
        // task.Exception is an AggregateException; show the wrapper and first inner message.
        MessageBox.Show($"Export has failed\r\n{task.Exception.Message}\r\n{task.Exception.InnerException?.Message}", Text, MessageBoxButtons.OK, MessageBoxIcon.Error);
    }

    // Let the user process whatever was downloaded before a fault/cancellation.
    if ((task.Status == TaskStatus.Faulted || task.Status == TaskStatus.Canceled) && context.ProcessedDocuments > 0)
    {
        if (MessageBox.Show($"{context.ProcessedDocuments} documents were downloaded, would you like to still process those even though this is an incomplete export?", Text, MessageBoxButtons.YesNo, MessageBoxIcon.Question) == DialogResult.Yes)
        {
            process = true;
        }
    }

    if (process && sfdExportCSV.ShowDialog() == DialogResult.OK)
    {
        context.CSVFileName = sfdExportCSV.FileName;
        // The merge step re-counts documents as they are written to the CSV.
        context.TotalDocuments = context.ProcessedDocuments;
        context.ProcessedDocuments = 0;

        // Fresh cancellation source for the merge phase.
        if (cancellationToken != null)
        {
            cancellationToken.Dispose();
        }
        cancellationToken = new CancellationTokenSource();

        toggleBusyState(true);
        lblOperation.Text = "Merging...";
        Task.Factory.StartNew(mergeDataSet, context, cancellationToken.Token)
            .ContinueWith(mergeDataSetCompleted, TaskScheduler.FromCurrentSynchronizationContext());
    }
    else
    {
        // Nothing to merge: drop the downloaded temp files now.
        cleanContext(context);
    }
}
/// <summary>
/// Starts a full data-set export: resets the cancellation source and launches the
/// download task, chaining the completion handler back onto the UI thread.
/// </summary>
private void btnExportDataSet_Click(object sender, EventArgs e)
{
    toggleBusyState(true);

    // Replace any cancellation source left over from a previous run.
    cancellationToken?.Dispose();
    cancellationToken = new CancellationTokenSource();

    var fetchContext = new FetchFullDataSetContext()
    {
        Query = txtLuceneQuery.Text,
        Token = cancellationToken.Token
    };

    fetchTask = Task.Factory
        .StartNew(fetchEntireDataSet, fetchContext, cancellationToken.Token)
        .ContinueWith(fetchEntireDataSetCompleted, TaskScheduler.FromCurrentSynchronizationContext());

    lblOperation.Text = "Downloading...";
}
/// <summary>
/// Background merge step: streams every serialized DataTable temp file into a single
/// CSV (header row taken from context.Columns), reporting progress every 500 rows and
/// deleting each temp file once its rows have been written out.
/// </summary>
/// <param name="contextObj">Boxed FetchFullDataSetContext (Task.Factory.StartNew state).</param>
private void mergeDataSet(object contextObj)
{
    FetchFullDataSetContext context = (FetchFullDataSetContext)contextObj;

    // NOTE(review): this loop never observes a cancellation token, so "cancel" during
    // the merge phase only takes effect via the task being cancelled before it starts.
    using (var sw = new StreamWriter(context.CSVFileName, false, Encoding.UTF8))
    {
        // AutoFlush keeps the output file current at the cost of a flush per write.
        sw.AutoFlush = true;
        using (var csv = new CsvWriter(sw))
        {
            // Header row: one field per known column.
            foreach (string column in context.Columns)
            {
                csv.WriteField(column);
            }
            csv.NextRecord();

            // SECURITY NOTE: BinaryFormatter is obsolete and unsafe on untrusted input
            // (removed entirely in .NET 9). It is only fed temp files this app wrote
            // itself, but the temp-file format should migrate off it.
            // Hoisted out of the loop: the formatter is loop-invariant.
            BinaryFormatter bf = new BinaryFormatter();

            foreach (var tempFile in context.TempFiles)
            {
                using (var fs = File.OpenRead(tempFile))
                {
                    DataTable table = (DataTable)bf.Deserialize(fs);
                    foreach (DataRow row in table.Rows)
                    {
                        // Emit fields in the global column order; a table missing a
                        // column contributes an empty field for it.
                        foreach (var columnName in context.Columns)
                        {
                            csv.WriteField(table.Columns.Contains(columnName) ? row[columnName] : string.Empty);
                        }
                        csv.NextRecord();

                        context.ProcessedDocuments++;
                        if (context.ProcessedDocuments % 500 == 0)
                        {
                            reportProgress(context.ProcessedDocuments, context.TotalDocuments);
                        }
                    }
                }

                // Final progress tick for this file, then drop the temp file just consumed.
                reportProgress(context.ProcessedDocuments, context.TotalDocuments);
                File.Delete(tempFile);
            }
        }
    }
}