/// <summary>
/// Writes the closing HTML elements and closes the output stream.
/// </summary>
/// <param name="sender">sender.</param>
/// <param name="e"><see cref="ExportCompleteArgs"/>.</param>
protected internal override void HandleDataReadComplete(object sender, ExportCompleteArgs e)
{
    _sw.WriteLine("</tbody></table></body></html>");
    _sw.Flush();
    _sw.Close();
    base.BubbleUpCompletedEvent(e);
}
/// <summary>
/// Collects the Commence row values as a jagged array using the Commence API.
/// </summary>
internal void GetDataByAPI()
{
    int rowsProcessed = 0;
    string[][] rawdata = null;
    for (int rows = 0; rows < totalRows; rows += numRows)
    {
        try
        {
            rawdata = cursor.GetRawData(numRows); // first dimension is rows, second dimension is columns
        }
        catch (CommenceCOMException)
        {
            throw;
        }
        rowsProcessed += numRows;
        var data = ProcessDataBatch(rawdata);
        // raise 'progress' event after each batch of rows
        CursorDataReadProgressChangedArgs args = new CursorDataReadProgressChangedArgs(data, rowsProcessed > totalRows ? totalRows : rowsProcessed, totalRows);
        OnDataProgressChanged(args);
    }
    // raise 'done' event
    ExportCompleteArgs e = new ExportCompleteArgs(totalRows);
    OnDataReadCompleted(e); // done with reading data
}
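// The loop above reads the cursor in fixed-size batches and reports progress after each one,
// clamping the last report to the real row count. The stand-alone sketch below illustrates only
// that batching/clamping pattern; the counts and names are made up and not part of this codebase.
using System;

internal static class BatchProgressSketch
{
    private static void Main()
    {
        const int totalRows = 10; // pretend the cursor reports 10 rows
        const int numRows = 3;    // batch size per GetRawData-style call

        int rowsProcessed = 0;
        for (int rows = 0; rows < totalRows; rows += numRows)
        {
            rowsProcessed += numRows;
            // the final batch may be smaller than numRows, so clamp the reported count
            int reported = rowsProcessed > totalRows ? totalRows : rowsProcessed;
            Console.WriteLine($"progress: {reported}/{totalRows}");
        }
        Console.WriteLine("done");
    }
}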
/// <summary>
/// Writes the object to file in JSON format.
/// </summary>
/// <param name="sender">sender.</param>
/// <param name="e"><see cref="ExportCompleteArgs"/>.</param>
protected internal override void HandleDataReadComplete(object sender, ExportCompleteArgs e)
{
    _sw.Flush();
    _sw.Close();
    using (StreamWriter sw = new StreamWriter(_fileName))
    {
        using (JsonTextWriter jtw = new JsonTextWriter(sw))
        {
            jtw.WriteStartObject();
            foreach (var o in _jc.MetaData)
            {
                jtw.WritePropertyName(o.Key);
                jtw.WriteValue(o.Value);
            }
            jtw.WritePropertyName("Items");
            jtw.WriteStartArray();
            using (StreamReader tr = new StreamReader(_tempFile))
            {
                jtw.WriteRaw(tr.ReadToEnd());
            }
            jtw.WriteEndArray();
            jtw.WriteEndObject();
        }
    }
    base.BubbleUpCompletedEvent(e);
}
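// The override above assembles the final JSON file from two sources: the metadata key/value pairs
// and the pre-serialized items that were streamed to a temp file during the export. The sketch
// below shows the same assembly step on in-memory data; the "Category" key and the items fragment
// are made-up examples, only the JsonTextWriter/WriteRaw technique mirrors the real code.
using System;
using System.IO;
using Newtonsoft.Json;

internal static class JsonAssemblySketch
{
    private static void Main()
    {
        string itemsFragment = "{\"Name\":\"Acme\"},{\"Name\":\"Globex\"}"; // stand-in for the temp-file contents
        var sb = new System.Text.StringBuilder();
        using (var sw = new StringWriter(sb))
        using (var jtw = new JsonTextWriter(sw))
        {
            jtw.WriteStartObject();
            jtw.WritePropertyName("Category");
            jtw.WriteValue("Account");
            jtw.WritePropertyName("Items");
            jtw.WriteStartArray();
            jtw.WriteRaw(itemsFragment); // raw text is injected verbatim, exactly as in the override above
            jtw.WriteEndArray();
            jtw.WriteEndObject();
        }
        // prints: {"Category":"Account","Items":[{"Name":"Acme"},{"Name":"Globex"}]}
        Console.WriteLine(sb.ToString());
    }
}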
/// <summary>
/// Reads the Commence database in an asynchronous fashion.
/// The idea is that reading of Commence data continues while event consumers process each batch.
/// </summary>
internal void GetDataByAPIAsync() // note: the name suggests async/await, which this method does not use
{
    int rowsProcessed = 0;
    var values = new BlockingCollection<CmcData>();

    var readTask = Task.Factory.StartNew(() =>
    {
        try
        {
            for (int rows = 0; rows < totalRows; rows += numRows)
            {
                string[][] rawdata = cursor.GetRawData(numRows); // first dimension is rows, second dimension is columns
                if (CTS.Token.IsCancellationRequested) { break; }
                rowsProcessed += numRows;
                CmcData rowdata = new CmcData()
                {
                    Data = rawdata,
                    RowsProcessed = rowsProcessed > totalRows ? totalRows : rowsProcessed
                };
                values.Add(rowdata);
            }
        }
        catch
        {
            CTS.Cancel(); // cancel data read
            throw; // rethrow; otherwise the error would be swallowed by the task
        }
        finally
        {
            values.CompleteAdding();
        }
    }, TaskCreationOptions.LongRunning);

    var processTask = Task.Factory.StartNew(() =>
    {
        foreach (var value in values.GetConsumingEnumerable())
        {
            if (CTS.Token.IsCancellationRequested) { break; }
            var data = ProcessDataBatch(value.Data);
            CursorDataReadProgressChangedArgs args = new CursorDataReadProgressChangedArgs(data, value.RowsProcessed, totalRows);
            OnDataProgressChanged(args); // raise event after each batch of rows
        }
    }, TaskCreationOptions.LongRunning);

    Task.WaitAll(readTask, processTask); // wait for both tasks before raising the 'done' event
    values.Dispose();

    // raise 'done' event
    ExportCompleteArgs e = new ExportCompleteArgs(totalRows);
    OnDataReadCompleted(e); // done with reading data
}
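// The method above is a producer/consumer pipeline: one long-running task feeds raw batches into a
// BlockingCollection while a second drains it and raises events. The self-contained skeleton below
// shows just that hand-off with simplified, assumed names (no Commence types involved).
using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

internal static class ProducerConsumerSketch
{
    private static void Main()
    {
        using (var queue = new BlockingCollection<int>())
        {
            var producer = Task.Factory.StartNew(() =>
            {
                try
                {
                    for (int batch = 0; batch < 5; batch++) { queue.Add(batch); } // stand-in for GetRawData batches
                }
                finally
                {
                    queue.CompleteAdding(); // lets the consumer's foreach terminate
                }
            }, TaskCreationOptions.LongRunning);

            var consumer = Task.Factory.StartNew(() =>
            {
                foreach (int batch in queue.GetConsumingEnumerable())
                {
                    Console.WriteLine($"processing batch {batch}"); // stand-in for ProcessDataBatch + progress event
                }
            }, TaskCreationOptions.LongRunning);

            Task.WaitAll(producer, consumer); // only signal 'done' after both sides have finished
        }
    }
}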
protected internal override void HandleDataReadComplete(object sender, ExportCompleteArgs e)
{
    if (_spreadSheetDocument != null)
    {
        _spreadSheetDocument.Dispose(); // saves, flushes and disposes
    }
    base.BubbleUpCompletedEvent(e);
}
protected internal override void HandleDataReadComplete(object sender, ExportCompleteArgs e)
{
    DataSetSerializer dse = new DataSetSerializer(this._ds, this._filename, base._settings);
    try
    {
        dse.Export();
    }
    catch
    {
        // any export error is swallowed here so the completed event below is still raised
    }
    base.BubbleUpCompletedEvent(e);
}
/// <summary>
/// Used to bubble up the export completed event.
/// </summary>
/// <param name="e">ExportCompleteArgs</param>
protected virtual void OnExportCompleted(ExportCompleteArgs e)
{
    ExportCompletedHandler handler = WriterCompleted;
    if (handler == null) { return; } // no subscribers
    Delegate[] eventHandlers = handler.GetInvocationList();
    foreach (Delegate currentHandler in eventHandlers)
    {
        ExportCompletedHandler currentSubscriber = (ExportCompletedHandler)currentHandler;
        try
        {
            currentSubscriber(this, e);
        }
        catch
        {
            // a throwing subscriber must not prevent the remaining subscribers from being notified
        }
    }
}
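// Why invoke the invocation list one delegate at a time? If the event were raised with a single
// handler(this, e) call, the first subscriber that throws would stop every later subscriber from
// being notified. The stand-alone sketch below (made-up handlers, standard EventHandler delegate)
// demonstrates the difference; it is an illustration, not code from this library.
using System;

internal static class SafeRaiseSketch
{
    private static event EventHandler Completed;

    private static void Main()
    {
        Completed += (s, e) => { throw new InvalidOperationException("faulty subscriber"); };
        Completed += (s, e) => Console.WriteLine("second subscriber still notified");

        EventHandler handler = Completed;
        if (handler == null) { return; }
        foreach (Delegate d in handler.GetInvocationList())
        {
            try { ((EventHandler)d)(null, EventArgs.Empty); }
            catch { /* deliberately swallowed, as in OnExportCompleted above */ }
        }
    }
}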
protected internal override void HandleDataReadComplete(object sender, ExportCompleteArgs e)
{
    cn.Close();
    base.BubbleUpCompletedEvent(e);
}
/// <summary>
/// Raises the DataReadCompleted event.
/// </summary>
/// <param name="e">ExportCompleteArgs</param>
protected virtual void OnDataReadCompleted(ExportCompleteArgs e)
{
    DataReadCompleted?.Invoke(this, e);
}
/// <summary>
/// Reads data using DDE. This is extremely slow and should only ever be used as a last resort.
/// </summary>
/// <param name="mocktables">Table definitions describing the primary category and its connections.</param>
internal void GetDataByDDE(List<TableDef> mocktables) // needs fixing
{
    /* DDE requests are limited to a maximum length of 255 characters,
     * which is easily exceeded. A workaround is splitting the requests.
     * Not pretty, but it is the only way to read many-to-many relationships that contain
     * more than 93,750 characters' worth of connected data without raising the maximum field size.
     */
    List<List<CommenceValue>> rows;
    List<CommenceValue> rowvalues;
    ICommenceDatabase db = new CommenceDatabase();

    // always define a category
    db.ViewCategory(this.cursor.Category);
    // are we dealing with a view?
    if (!string.IsNullOrEmpty(cursor.View))
    {
        db.ViewView(this.cursor.View);
    }
    int itemCount = db.ViewItemCount();

    for (int i = 1; i <= itemCount; i++) // note that we use a 1-based iterator
    {
        rows = new List<List<CommenceValue>>();
        rowvalues = new List<CommenceValue>();
        foreach (TableDef td in mocktables)
        {
            string[] DDEResult = null;
            List<string> fieldNames = td.ColumnDefinitions.Select(o => o.FieldName).ToList();
            if (td.Primary)
            {
                // ViewFields and ViewConnectedFields have a limited capacity:
                // the total length of a DDE command cannot exceed 255 characters.
                // Limit the field-name portion to 150 characters to be on the safe side
                // (ViewConnectedFilter and two delimiters already take up 35 characters!)
                ListChopper lcu = new ListChopper(fieldNames, 150);
                foreach (List<string> l in lcu.Portions)
                {
                    DDEResult = db.ViewFields(i, l);
                    // we have our results; now create CommenceValue objects from them
                    // and match them up with their respective columns. This is a little tricky...
                    for (int j = 0; j < DDEResult.Length; j++)
                    {
                        ColumnDefinition cd = td.ColumnDefinitions.Find(o => o.FieldName.Equals(l[j]));
                        string[] buffer = new string[] { DDEResult[j] };
                        //buffer = FormatValues(buffer, this.Formatting, cd);
                        buffer = FormatValues(buffer, cd);
                        CommenceValue v = new CommenceValue(buffer[0], cd);
                        rowvalues.Add(v);
                    } // for
                } // list l
            }
            else // we are dealing with a connection
            {
                int conItemCount = db.ViewConnectedCount(i, td.ColumnDefinitions[0].Connection, td.ColumnDefinitions[0].Category); // doesn't matter which one we use
                // Here's a nice challenge:
                // on every iteration we get a row of field values from the connection,
                // and to make things worse we chop the requests up, so they aren't even complete rows.
                // We must aggregate the values per field. A DataTable is used to work around that;
                // a dictionary would have worked as well.
                DataTable dt = new DataTable();
                for (int c = 0; c < fieldNames.Count; c++)
                {
                    dt.Columns.Add(fieldNames[c]); // add fields as columns, keeping everything default
                }
                // loop all connected items
                for (int citemcount = 1; citemcount <= conItemCount; citemcount++)
                {
                    DataRow dr = dt.NewRow(); // create a row containing all columns
                    ListChopper lcu = new ListChopper(fieldNames, 150);
                    foreach (List<string> list in lcu.Portions)
                    {
                        DDEResult = db.ViewConnectedFields(i, td.ColumnDefinitions[0].Connection, td.ColumnDefinitions[0].Category, citemcount, list);
                        // populate columns for the fields we requested
                        for (int j = 0; j < DDEResult.Length; j++)
                        {
                            dr[list[j]] = DDEResult[j];
                        }
                    } // list l
                    dt.Rows.Add(dr);
                } // citemcount

                // create a CommenceValue from every column in the datatable
                foreach (DataColumn dc in dt.Columns)
                {
                    // this will also return columns that have no data, which is what we want
                    string[] query = (from r in dt.AsEnumerable() select r.Field<String>(dc.ColumnName)).ToArray();
                    ColumnDefinition cd = td.ColumnDefinitions.Find(o => o.FieldName.Equals(dc.ColumnName));
                    CommenceValue cv = null;
                    if (query.Length > 0) // only create a value if there is one
                    {
                        //query = FormatValues(query, this.Formatting, cd);
                        query = FormatValues(query, cd);
                        cv = new CommenceValue(query, cd);
                    }
                    else
                    {
                        // create an empty CommenceValue
                        cv = new CommenceValue(cd);
                    }
                    rowvalues.Add(cv);
                }
            } // if
        } // foreach tabledef
        rows.Add(rowvalues);
        CursorDataReadProgressChangedArgs args = new CursorDataReadProgressChangedArgs(rows, i, totalRows); // progress within the cursor
        OnDataProgressChanged(args);
    } // i
    db = null;
    ExportCompleteArgs a = new ExportCompleteArgs(itemCount);
    OnDataReadCompleted(a);
}
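// The DDE path stays under the 255-character command limit by chopping the field-name list into
// portions whose combined length fits a budget (the role ListChopper plays above). The sketch
// below is NOT the real ListChopper (its exact rules are not shown here); it is an illustrative,
// self-contained stand-in that greedily groups names under an assumed character budget.
using System;
using System.Collections.Generic;

internal static class ChopSketch
{
    private static IEnumerable<List<string>> Chop(IEnumerable<string> items, int maxChars)
    {
        var portion = new List<string>();
        int length = 0;
        foreach (string item in items)
        {
            if (portion.Count > 0 && length + item.Length > maxChars)
            {
                yield return portion; // current portion is full, start a new one
                portion = new List<string>();
                length = 0;
            }
            portion.Add(item);
            length += item.Length;
        }
        if (portion.Count > 0) { yield return portion; }
    }

    private static void Main()
    {
        var fieldNames = new List<string> { "accountKey", "businessNumber", "address", "city", "zipPostal", "relatedContact" };
        int i = 0;
        foreach (List<string> portion in Chop(fieldNames, 30))
        {
            Console.WriteLine($"request {++i}: {string.Join(", ", portion)}"); // each request stays under the budget
        }
    }
}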
/// <summary>
/// Bubbles up the completed event to the exporting class.
/// </summary>
/// <param name="e">ExportCompleteArgs</param>
protected void BubbleUpCompletedEvent(ExportCompleteArgs e)
{
    OnExportCompleted(e);
}
/// <summary>
/// Method that deals with any finalization of the export,
/// such as writing closing elements and closing streams.
/// </summary>
/// <param name="sender">sender.</param>
/// <param name="e">ExportCompleteArgs.</param>
protected internal abstract void HandleDataReadComplete(object sender, ExportCompleteArgs e);
/// <summary>
/// Raises the ExportCompleted event.
/// </summary>
/// <param name="e">ExportCompleteArgs</param>
protected virtual void OnExportCompleted(ExportCompleteArgs e)
{
    ExportCompleted?.Invoke(this, e);
}
/// <summary>
/// Event handler for the ExportCompleted event.
/// </summary>
/// <param name="sender">sender object.</param>
/// <param name="e">ExportCompleteArgs</param>
public virtual void HandleExportCompleted(object sender, ExportCompleteArgs e)
{
    OnExportCompleted(e);
}