Example #1
        // collect Commence row values as a jagged array
        internal void GetDataByAPI()
        {
            int rowsProcessed = 0;

            string[][] rawdata = null;
            for (int rows = 0; rows < totalRows; rows += numRows)
            {
                try
                {
                    rawdata = cursor.GetRawData(numRows); // first dimension is rows, second dimension is columns
                }
                catch (CommenceCOMException)
                {
                    throw; // placeholder catch: let Commence COM errors propagate to the caller unchanged
                }
                rowsProcessed += numRows;
                var data = ProcessDataBatch(rawdata);
                // raise 'progress' event
                CursorDataReadProgressChangedArgs args = new CursorDataReadProgressChangedArgs(data, rowsProcessed > totalRows ? totalRows : rowsProcessed, totalRows);
                OnDataProgressChanged(args); // raise event after each batch of rows
            }
            // raise 'done' event
            ExportCompleteArgs e = new ExportCompleteArgs(totalRows);

            OnDataReadCompleted(e); // done with reading data
        }
Example #2
 protected internal override void HandleProcessedDataRows(object sender, CursorDataReadProgressChangedArgs e)
 {
     foreach (List <CommenceValue> row in e.RowValues)
     {
         List <string> rowvalues = new List <string>();
         foreach (CommenceValue v in row)
         {
             if (v.ColumnDefinition.IsConnection) // connection
             {
                 if (!base._settings.SkipConnectedItems)
                 {
                     if (v.ConnectedFieldValues == null)
                     {
                         rowvalues.Add(base._settings.TextQualifier + String.Join(base._settings.TextDelimiterConnections, string.Empty) + base._settings.TextQualifier); // no connected values: write an empty qualified field
                     }
                     else
                     {
                          // connected values used to be split and re-joined here, which was fragile;
                          // splitting is no longer done, so string.Join receives a single value and is harmless to keep.
                         rowvalues.Add(base._settings.TextQualifier + String.Join(base._settings.TextDelimiterConnections, v.ConnectedFieldValues) + base._settings.TextQualifier);
                     } // if
                 }     // if
             }         //if
             else
             {
                 rowvalues.Add(base._settings.TextQualifier + v.DirectFieldValue + base._settings.TextQualifier);
             } // else
         }     // foreach
         _sw.WriteLine(String.Join(base._settings.TextDelimiter, rowvalues));
     }         //foreach
     BubbleUpProgressEvent(e);
 }
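A minimal sketch of the output format the handler above produces. The setting names mirror base._settings, but the concrete values (a double quote as qualifier, ";" as delimiter, a newline between connected values) and the sample data are assumptions:

using System;
using System.Collections.Generic;

internal static class DelimitedOutputDemo
{
    // Hypothetical settings; the real values come from base._settings in the writer above.
    private const string TextQualifier = "\"";
    private const string TextDelimiter = ";";
    private const string TextDelimiterConnections = "\n";

    internal static void Main()
    {
        var rowvalues = new List<string>
        {
            TextQualifier + "Acme Corp." + TextQualifier,                                                    // direct field value
            TextQualifier + string.Join(TextDelimiterConnections, new[] { "Bob", "Alice" }) + TextQualifier  // connected values
        };
        // One delimited line per Commence item, as written by _sw.WriteLine above.
        Console.WriteLine(string.Join(TextDelimiter, rowvalues));
        // Output: "Acme Corp.";"Bob
        //         Alice"
    }
}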
Example #3
        protected internal override void HandleProcessedDataRows(object sender, CursorDataReadProgressChangedArgs e)
        {
            StringBuilder sb = new StringBuilder();

            foreach (List <CommenceValue> row in e.RowValues)
            {
                _rowcounter++;
                int colcounter = 0;
                sb.Append("<tr class=\"cmclibnet-item\">");
                foreach (CommenceValue v in row)
                {
                    colcounter++;
                    // we can have either no values, or a direct value, or connected values
                    if (v.IsEmpty)
                    {
                        sb.Append("<td class=\"cmclibnet-value\" id=\"r" + _rowcounter + "c" + colcounter + "\"></td>");
                    }
                    else
                    {
                        string s = v.DirectFieldValue ?? string.Join(base._settings.TextDelimiterConnections, v.ConnectedFieldValues);
                        sb.Append("<td class=\"cmclibnet-value\" id=\"r" + _rowcounter + "c" + colcounter + "\">" + HtmlEncode(s) + "</td>");
                    }
                } // foreach
                sb.Append("</tr>");
            }     // foreach
            _sw.WriteLine(sb.ToString());
            base.BubbleUpProgressEvent(e);
        }
Example #4
        /// <summary>
        /// Reads the Commence database in an asynchronous fashion.
        /// The idea is that reading Commence data continues while the event consumers do their work.
        /// </summary>
        internal void GetDataByAPIAsync() // note: the Async suffix is misleading; the method blocks until reading and processing complete
        {
            int rowsProcessed = 0;
            var values        = new BlockingCollection <CmcData>();
            var readTask      = Task.Factory.StartNew(() =>
            {
                try
                {
                    for (int rows = 0; rows < totalRows; rows += numRows)
                    {
                        string[][] rawdata = cursor.GetRawData(numRows); // first dimension is rows, second dimension is columns
                        if (CTS.Token.IsCancellationRequested)
                        {
                            break;
                        }
                        rowsProcessed += numRows;
                        CmcData rowdata = new CmcData()
                        {
                            Data          = rawdata,
                            RowsProcessed = rowsProcessed > totalRows ? totalRows : rowsProcessed
                        };
                        values.Add(rowdata);
                    }
                }
                catch
                {
                    CTS.Cancel(); // cancel data read
                    throw;        // rethrow the exception; without this, errors from the read task would be swallowed
                }
                finally
                {
                    values.CompleteAdding();
                }
            }, TaskCreationOptions.LongRunning);

            var processTask = Task.Factory.StartNew(() =>
            {
                foreach (var value in values.GetConsumingEnumerable())
                {
                    if (CTS.Token.IsCancellationRequested)
                    {
                        break;
                    }

                    var data = ProcessDataBatch(value.Data);
                    CursorDataReadProgressChangedArgs args = new CursorDataReadProgressChangedArgs(data, value.RowsProcessed, totalRows);
                    OnDataProgressChanged(args); // raise event after each batch of rows
                }
            }, TaskCreationOptions.LongRunning);

            Task.WaitAll(readTask, processTask); // we need to wait all before we give the 'done' signal.
            values.Dispose();
            // raise 'done' event
            ExportCompleteArgs e = new ExportCompleteArgs(totalRows);

            OnDataReadCompleted(e); // done with reading data
        }
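GetDataByAPIAsync is a classic producer/consumer setup: one long-running task reads batches into a BlockingCollection, another consumes them via GetConsumingEnumerable, and CompleteAdding in the finally block lets the consumer drain and finish even when reading stops early. A stripped-down sketch of that shape, with placeholder batches instead of Commence data:

using System;
using System.Collections.Concurrent;
using System.Threading;
using System.Threading.Tasks;

internal static class ProducerConsumerSketch
{
    internal static void Run()
    {
        using var cts     = new CancellationTokenSource();
        using var batches = new BlockingCollection<int[]>();

        var readTask = Task.Factory.StartNew(() =>
        {
            try
            {
                for (int batch = 0; batch < 10; batch++)
                {
                    if (cts.Token.IsCancellationRequested) { break; }
                    batches.Add(new[] { batch, batch + 1 }); // stands in for cursor.GetRawData(numRows)
                }
            }
            finally
            {
                batches.CompleteAdding(); // unblocks the consumer when reading stops for any reason
            }
        }, TaskCreationOptions.LongRunning);

        var processTask = Task.Factory.StartNew(() =>
        {
            foreach (var batch in batches.GetConsumingEnumerable())
            {
                if (cts.Token.IsCancellationRequested) { break; }
                Console.WriteLine($"processed batch of {batch.Length} rows"); // stands in for ProcessDataBatch + progress event
            }
        }, TaskCreationOptions.LongRunning);

        Task.WaitAll(readTask, processTask); // only signal 'done' after both sides have finished
    }
}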
Example #5
 protected internal override void HandleProcessedDataRows(object sender, CursorDataReadProgressChangedArgs e)
 {
     foreach (List <CommenceValue> row in e.RowValues)
     {
         InsertValues(row);
     }
     BubbleUpProgressEvent(e);
 }
Example #6
        protected internal override void HandleProcessedDataRows(object sender, CursorDataReadProgressChangedArgs e)
        {
            // at this point EPPlus has no idea what to do with e.RowValues
            // we need to translate our values to something it understands
            // let's evaluate what we have:
            // e.RowValues contains a List of a List of CommenceValue
            // A list of CommenceValue represents a single item (row)
            // EPPlus cannot use anonymous values in LoadFromCollection,
            // so we will translate our results to a datatable first.
            // we do not use the more advanced ADO functionality in CmcLibNet,
            // just a flat table.

            using (ExcelPackage xl = new ExcelPackage(_fi))
            {
                var ws = xl.Workbook.Worksheets.FirstOrDefault(f => f.Name.Equals(_sheetName));
                if (ws == null)
                {
                    ws = xl.Workbook.Worksheets.Add(_sheetName);
                }

                _dataTable.Rows.Clear();
                foreach (List <CommenceValue> list in e.RowValues) // process rows
                {
                    object[] data = GetDataRowValues(list);
                    _dataTable.Rows.Add(data);
                }

                if (ws.Dimension == null) // first iteration
                {
                    ws.Cells.LoadFromDataTable(_dataTable, _settings.HeadersOnFirstRow);
                    if (_settings.HeadersOnFirstRow)
                    {
                        ws.Cells[1, 1, 1, ws.Dimension.End.Column].Style.Font.Bold = true;
                    }
                }
                else
                {
                    var lastRow = ws.Dimension.End.Row;
                    ws.Cells[lastRow + 1, 1].LoadFromDataTable(_dataTable, false);
                }
                int firstDataRow = _settings.HeadersOnFirstRow ? 2 : 1;
                SetNumberFormatStyles(ws, firstDataRow);
                try
                {
                    ws.Cells.AutoFitColumns(10, 50);
                }
                catch (Exception) { } // AutoFitColumns throws in EPPlus on long strings; see https://github.com/JanKallman/EPPlus/issues/445
                xl.Save();
            }
            base.BubbleUpProgressEvent(e);
        }
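The snippet above only shows the flat _dataTable being filled; how it is created is not part of the example. A hypothetical sketch of such a setup step, assuming one string-typed column per Commence column (column names taken from whatever the cursor exposes):

using System.Collections.Generic;
using System.Data;

internal static class FlatTableSketch
{
    // Build the flat table once per export: one string column per Commence column, in cursor order.
    internal static DataTable BuildTable(IEnumerable<string> columnNames)
    {
        var dt = new DataTable("CommenceExport");
        foreach (string name in columnNames)
        {
            dt.Columns.Add(name, typeof(string)); // everything is text; number formats are applied in the worksheet afterwards
        }
        return dt;
    }
}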
Example #7
        protected internal override void HandleProcessedDataRows(object sender, CursorDataReadProgressChangedArgs e)
        {
            // construct data, create eventargs, raise event
            JsonCreator    jc         = new JsonCreator(this);
            List <JObject> list       = jc.SerializeRowValues(e.RowValues);
            var            jsonString = "[" + string.Join(",", list.Select(o => o.ToString())) + "]";
            // do custom bubbling up
            ExportProgressChangedArgs args = new ExportProgressChangedArgs(
                e.RowsProcessed,
                e.RowsTotal,
                jsonString);

            base.OnWriterProgressChanged(args);
        }
Example #8
        /// <summary>
        /// Writes JSON to a temporary file.
        /// </summary>
        /// <param name="sender">sender.</param>
        /// <param name="e">CursorDataReadProgressChangedArgs.</param>
        protected internal override void HandleProcessedDataRows(object sender, CursorDataReadProgressChangedArgs e)
        {
            StringBuilder  sb         = new StringBuilder();
            List <JObject> list       = _jc.SerializeRowValues(e.RowValues);
            var            jsonString = string.Join(",", list.Select(o => o.ToString()));

            if (!firstRun && !string.IsNullOrEmpty(jsonString))
            {
                _sw.Write(',');  // add record delimiter on any data except first batch
            }
            _sw.Write(jsonString);
            firstRun = false;
            BubbleUpProgressEvent(e);
        }
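Note that this handler only writes comma-separated object fragments; presumably the surrounding brackets are added once the export completes, but that step is not shown here. A hypothetical sketch of such a wrap-up:

using System.IO;

internal static class JsonFileFinalizer
{
    // Hypothetical finalization step: turn the fragments "{..},{..},{..}" in the
    // temporary file into a valid JSON array in the output file.
    internal static void WrapInArray(string tempPath, string outputPath)
    {
        using var writer = new StreamWriter(outputPath);
        writer.Write('[');
        writer.Write(File.ReadAllText(tempPath));
        writer.Write(']');
    }
}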
Example #9
 protected internal override void HandleProcessedDataRows(object sender, CursorDataReadProgressChangedArgs e)
 {
     try
     {
         foreach (List <CommenceValue> datarow in e.RowValues)
         {
             // pass on rowdata for RowParser
             AdoNetRowWriter rp = new AdoNetRowWriter(_rows_processed, datarow, _ds); // currentrow contains only last row for loop
             rp.ProcessRow();
             _rows_processed++;
         }
         BubbleUpProgressEvent(e);
     }
      catch { } // note: swallows all exceptions, so write errors never reach the caller
 }
Example #10
 protected virtual void OnDataProgressChanged(CursorDataReadProgressChangedArgs e)
 {
     DataProgressChanged?.Invoke(this, e);
 }
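The null-conditional Invoke implies a matching event declaration, which the snippet does not show. A minimal sketch of how the declaration and a subscriber might look, assuming a standard EventHandler<CursorDataReadProgressChangedArgs> event (the args type is the library's own):

using System;

internal class CursorReaderSketch
{
    // Assumed declaration; only the ?.Invoke call appears in the snippet above.
    public event EventHandler<CursorDataReadProgressChangedArgs> DataProgressChanged;

    protected virtual void OnDataProgressChanged(CursorDataReadProgressChangedArgs e)
    {
        // The null-conditional invoke guards against there being no subscribers
        // (and against a race with unsubscription).
        DataProgressChanged?.Invoke(this, e);
    }
}

// Usage: a consumer subscribes before the export starts, e.g.
// reader.DataProgressChanged += (sender, e) => Console.WriteLine($"{e.RowsProcessed}/{e.RowsTotal}");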
Example #11
        /// <summary>
        /// Reads data using DDE. This is extremely slow and should only ever be used as a last resort.
        /// </summary>
        /// <param name="mocktables"></param>
        internal void GetDataByDDE(List <TableDef> mocktables) // needs fixing
        {
            /* DDE requests are limited to a maximum length of 255 characters,
             * which is easily exceeded. A workaround is splitting the requests.
             * Not pretty, but it is the only way to read many-to-many relationships holding
             * more than 93,750 characters' worth of connected data without raising the maxfieldsize.
             */

            List <List <CommenceValue> > rows;
            List <CommenceValue>         rowvalues;
            ICommenceDatabase            db = new CommenceDatabase();

            // always define a category
            db.ViewCategory(this.cursor.Category);
            // are we dealing with a view?
            if (!string.IsNullOrEmpty(cursor.View))
            {
                db.ViewView(this.cursor.View);
            }
            int itemCount = db.ViewItemCount();

            for (int i = 1; i <= itemCount; i++) // note that we use a 1-based iterator
            {
                rows      = new List <List <CommenceValue> >();
                rowvalues = new List <CommenceValue>();
                foreach (TableDef td in mocktables)
                {
                    string[]      DDEResult  = null;
                    List <string> fieldNames = td.ColumnDefinitions.Select(o => o.FieldName).ToList();
                    if (td.Primary)
                    {
                        // ViewFields and ViewConnectedFields have a limited capacity
                        // the total length of a DDE command cannot exceed 255 characters
                        // We therefore cap each portion at 150 characters to stay on the safe side
                        // (ViewConnectedFilter and two delimiters already take up 35 characters!)
                        ListChopper lcu = new ListChopper(fieldNames, 150);
                        foreach (List <string> l in lcu.Portions)
                        {
                            DDEResult = db.ViewFields(i, l);

                            // we have our results, we now have to create CommenceValue objects from it
                            // and we also have to match them up with their respective column
                            // this is a little tricky...
                            for (int j = 0; j < DDEResult.Length; j++)
                            {
                                ColumnDefinition cd     = td.ColumnDefinitions.Find(o => o.FieldName.Equals(l[j]));
                                string[]         buffer = new string[] { DDEResult[j] };
                                //buffer = FormatValues(buffer,this.Formatting, cd);
                                buffer = FormatValues(buffer, cd);
                                CommenceValue v = new CommenceValue(buffer[0], cd);
                                rowvalues.Add(v);
                            } // for
                        }     // list l
                    }
                    else      // we are dealing with a connection
                    {
                        int conItemCount = db.ViewConnectedCount(i, td.ColumnDefinitions[0].Connection, td.ColumnDefinitions[0].Category); // doesn't matter which one we use
                        // here's a nice challenge:
                        // every iteration returns a row of field values from the connection,
                        // and to make things worse we chop the requests up, so they aren't even complete rows.
                        // we must aggregate the values per field.
                        // a DataTable is used to work around that; a dictionary would also have worked.
                        DataTable dt = new DataTable();
                        for (int c = 0; c < fieldNames.Count; c++)
                        {
                            dt.Columns.Add(fieldNames[c]); // add fields as columns, keeping everything default
                        }

                        // loop all connected items
                        for (int citemcount = 1; citemcount <= conItemCount; citemcount++)
                        {
                            DataRow     dr  = dt.NewRow(); // create a row containing all columns
                            ListChopper lcu = new ListChopper(fieldNames, 150);
                            foreach (List <string> list in lcu.Portions)
                            {
                                DDEResult = db.ViewConnectedFields(i, td.ColumnDefinitions[0].Connection, td.ColumnDefinitions[0].Category, citemcount, list);
                                // populate columns for the fields we requested
                                for (int j = 0; j < DDEResult.Length; j++)
                                {
                                    dr[list[j]] = DDEResult[j];
                                }
                            } // list l
                            dt.Rows.Add(dr);
                        }     // citemcount

                        // create a CommenceValue from every column in the datatable
                        foreach (DataColumn dc in dt.Columns)
                        {
                            // this will also return columns that have no data, which is what we want.
                            string[] query =
                                (from r in dt.AsEnumerable()
                                 select r.Field <String>(dc.ColumnName)).ToArray();
                            ColumnDefinition cd = td.ColumnDefinitions.Find(o => o.FieldName.Equals(dc.ColumnName));
                            CommenceValue    cv = null;
                            if (query.Length > 0) // only create value if there is one
                            {
                                //query = FormatValues(query, this.Formatting, cd);
                                query = FormatValues(query, cd);
                                cv    = new CommenceValue(query, cd);
                            }
                            else
                            {
                                // create empty CommenceValue
                                cv = new CommenceValue(cd);
                            }
                            rowvalues.Add(cv);
                        }
                    } // if
                }     // foreach tabledef
                rows.Add(rowvalues);
                CursorDataReadProgressChangedArgs args = new CursorDataReadProgressChangedArgs(rows, i, totalRows); // progress within the cursor
                OnDataProgressChanged(args);
            } // i
            db = null;
            ExportCompleteArgs a = new ExportCompleteArgs(itemCount);

            OnDataReadCompleted(a);
        }
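The method above leans on ListChopper to keep each DDE request under the character budget. Its implementation is not included in these examples; a hypothetical sketch of that kind of splitting, for illustration only:

using System.Collections.Generic;

internal static class FieldNameChopper
{
    // Split a list of field names into portions whose combined length (including a
    // one-character delimiter per name) stays under maxChars, so each DDE request
    // fits within the 255-character limit.
    internal static IEnumerable<List<string>> Portions(IReadOnlyList<string> names, int maxChars)
    {
        var portion = new List<string>();
        int length  = 0;
        foreach (string name in names)
        {
            if (portion.Count > 0 && length + name.Length + 1 > maxChars)
            {
                yield return portion;          // current portion is full; hand it out
                portion = new List<string>();
                length  = 0;
            }
            portion.Add(name);
            length += name.Length + 1;
        }
        if (portion.Count > 0) { yield return portion; }
    }
}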
Example #12
 protected internal override void HandleProcessedDataRows(object sender, CursorDataReadProgressChangedArgs e)
 {
     // we just always append at this point
     AppendRows(e.RowValues);
     base.BubbleUpProgressEvent(e);
 }
Example #13
 protected internal override void HandleProcessedDataRows(object sender, CursorDataReadProgressChangedArgs e)
 {
     AppendToXml(e.RowValues);
     BubbleUpProgressEvent(e);
 }
Example #14
 /// <summary>
 /// Derived classes can use this method to bubble up the ExportProgressChanged event
 /// </summary>
 /// <param name="e">ExportProgressChangedArgs</param>
 protected void BubbleUpProgressEvent(CursorDataReadProgressChangedArgs e)
 {
     OnWriterProgressChanged(new ExportProgressChangedArgs(e.RowsProcessed, e.RowsTotal));
 }
Example #15
 /// <summary>
 /// Method that deals with the data as it is being read.
 /// The minimum amount of expected data is a single list of CommenceValue objects representing a single item (row) in Commence,
 /// but it can also be multiple lists representing Commence items.
 /// It must NOT be a partial Commence item!
 /// </summary>
 /// <param name="sender">sender.</param>
 /// <param name="e">ExportProgressChangedArgs.</param>
 protected internal abstract void HandleProcessedDataRows(object sender, CursorDataReadProgressChangedArgs e);
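For illustration, a minimal writer honoring this contract. The real writers derive from a base class (not shown in these examples) and mark the method as an override; the base call that forwards progress is indicated as a comment, and the output itself is a placeholder:

using System;
using System.Collections.Generic;

internal class ConsoleWriterSketch
{
    protected internal void HandleProcessedDataRows(object sender, CursorDataReadProgressChangedArgs e)
    {
        // e.RowValues always holds one or more complete Commence items, never a partial item.
        foreach (List<CommenceValue> row in e.RowValues)
        {
            Console.WriteLine($"item with {row.Count} values"); // placeholder for real output
        }
        // BubbleUpProgressEvent(e); // forward progress to consumers, as the other writers do
    }
}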