/// <summary>
/// Converts the raw string <paramref name="value"/> read from the file into the CLR
/// type described by <paramref name="col"/> and assigns it to the mapped member of
/// <paramref name="obj"/>. Conversion and format errors are collected into
/// <paramref name="ae"/> instead of being thrown, so one bad field does not abort
/// processing of the row.
/// </summary>
/// <param name="obj">Target object whose member receives the converted value.</param>
/// <param name="col">Column metadata: converter/parse method, length, format, member.</param>
/// <param name="value">Raw field text from the file; may be null.</param>
/// <param name="LineNbr">1-based line number, used for error reporting.</param>
/// <param name="ae">Accumulator for per-field exceptions.</param>
private void Convert(T obj, FileColumnAttribute col, string value, int LineNbr, AggregatedException ae)
{
    if (value == null || col.PassOver)
    {
        // A missing/skipped value is only an error when the column is mandatory.
        if (!col.CanBeNull)
        {
            ae.AddException(new WrongFormatException("Value can't be null",
                typeof(T).ToString(), value, col, LineNbr, m_fileName));
        }
    }
    else
    {
        // FIX: removed redundant 'if (value != null)' — value is guaranteed
        // non-null in this branch.
        value = value.Trim();

        // Enforce the declared maximum length; UInt16.MaxValue means "no limit".
        // The value is truncated so conversion can still be attempted.
        if (col.MaxLength != UInt16.MaxValue && value.Length > col.MaxLength)
        {
            ae.AddException(new WrongFormatException(" Value too long",
                typeof(T).ToString(), value, col, LineNbr, m_fileName));
            value = value.Substring(0, col.MaxLength);
        }

        try
        {
            Object objValue = null;

            // Normally, either col.typeConverter is not null,
            // or col.parseNumberMethod is not null.
            if (col.typeConverter != null)
            {
                objValue = col.typeConverter.ConvertFromString(
                    null, m_fileDescription.FileCultureInfo, value);
            }
            else if (col.parseNumberMethod != null)
            {
                if (col.WithOutSeparator)
                {
                    // The file stores the number without a decimal separator;
                    // re-insert it. 'pos' is the number of digits after the
                    // decimal point, derived from the output format.
                    // NOTE(review): assumes OutputFormat contains a '.'; if not,
                    // IndexOf returns -1 and pos exceeds MaxLength — confirm.
                    int pos = col.MaxLength - col.OutputFormat.IndexOf('.');
                    if (col.MaxLength == value.Length)
                    {
                        value = value.Substring(0, col.MaxLength - pos)
                            + m_fileDescription.FileCultureInfo.NumberFormat.CurrencyDecimalSeparator
                            + value.Substring(col.MaxLength - pos, pos);
                    }
                    else if (value.Length > pos)
                    {
                        value = value.Substring(0, value.Length - pos)
                            + m_fileDescription.FileCultureInfo.NumberFormat.CurrencyDecimalSeparator
                            + value.Substring(value.Length - pos, pos);
                    }
                    else
                    {
                        // FIX: the value has fewer digits than the decimal part
                        // requires, so it is too short — the original message
                        // incorrectly said " Value too long".
                        ae.AddException(new WrongFormatException(" Value too short",
                            typeof(T).ToString(), value, col, LineNbr, m_fileName));
                        return;
                    }
                }

                objValue = col.parseNumberMethod.Invoke(
                    col.fieldType,
                    new Object[] { value, col.NumberStyle, m_fileDescription.FileCultureInfo });
            }
            else if (col.parseDateMethod != null)
            {
                objValue = col.parseDateMethod.Invoke(
                    col.fieldType,
                    new Object[] { value, col.OutputFormat, m_fileDescription.FileCultureInfo });
            }
            else
            {
                // No TypeConverter and no Parse method available.
                // Try direct approach.
                objValue = value;
            }

            if (col.MemberInfo != null)
            {
                if (col.MemberInfo is PropertyInfo)
                {
                    ((PropertyInfo)col.MemberInfo).SetValue(obj, objValue, null);
                }
                else
                {
                    ((FieldInfo)col.MemberInfo).SetValue(obj, objValue);
                }
            }
            else if (!m_fileDescription.AllowSkipColunm)
            {
                // FIX: replaced bare 'throw new Exception()' with a specific,
                // descriptive exception. Note: like the original, this throw is
                // caught (and swallowed) by the catch block below, since it sits
                // inside the same try — behavior is unchanged.
                throw new InvalidOperationException(
                    "No member mapped for column and skipping columns is not allowed.");
            }
        }
        catch (Exception ex)
        {
            // Reflection (Invoke/SetValue) wraps the real error; unwrap it
            // before classifying.
            if (ex is TargetInvocationException)
            {
                ex = ex.InnerException;
            }
            if (ex is FormatException)
            {
                ae.AddException(new WrongFormatException(
                    value.ToString() + "-" + col.ToString() + "-" + LineNbr.ToString() + "Wrong Format",
                    typeof(T).ToString(), value, col, LineNbr, m_fileName));
            }
            // NOTE(review): exceptions that are not FormatException are silently
            // swallowed here — confirm this is intentional before adding a
            // rethrow or logging.
        }
    }
}
/// <summary>
/// Lazily reads objects of type <typeparamref name="T"/> from a file or an open
/// stream. If <typeparamref name="T"/> implements <c>IDataRow</c>, raw data rows
/// are returned without field mapping. Per-row conversion errors are accumulated
/// in an <c>AggregatedException</c> rather than aborting the read.
/// </summary>
/// <param name="fileName">Path to read; when null/empty, <paramref name="stream"/> is used.</param>
/// <param name="stream">Seekable stream to read when no file name is given; it is rewound.</param>
/// <param name="fileDescription">Encoding, culture, header and error-limit settings.</param>
/// <returns>A lazy sequence of parsed objects (materialized eagerly in DEBUG builds).</returns>
public IEnumerable<T> ReadData<T>(
    string fileName,
    StreamReader stream,
    ExportFileDescription fileDescription) where T : class, new()
{
    // If T implements IDataRow, then we're reading raw data rows.
    bool readingRawDataRows = typeof(IDataRow).IsAssignableFrom(typeof(T));

#if DEBUG
    List<T> ret = new List<T>();
#endif

    // The constructor for FieldMapperReading will throw if there is something wrong
    // with type T, so invoke it before opening the file — otherwise the file would
    // be left open on failure.
    //
    // If T implements IDataRow, there is no need for a FieldMapper, because in that
    // case we're returning raw data rows.
    FieldMapperReading<T> fm = null;
    if (!readingRawDataRows)
    {
        fm = new FieldMapperReading<T>(fileDescription, fileName, false, Key);
    }

    // Each time the IEnumerable<T> returned from this method is accessed in a
    // foreach, ReadData is called again (not the original Read overload!).
    // So open the file here, or rewind the stream.
    bool readingFile = !string.IsNullOrEmpty(fileName);
    if (readingFile)
    {
        stream = new StreamReader(
            fileName,
            fileDescription.TextEncoding,
            fileDescription.DetectEncodingFromByteOrderMarks);
    }
    else
    {
        // Rewind the stream; it must exist and be seekable.
        if ((stream == null) || (!stream.BaseStream.CanSeek))
        {
            // FIX: replaced bare 'throw new Exception()' with a descriptive,
            // specific exception (still caught by existing catch(Exception)).
            throw new InvalidOperationException(
                "Stream is null or does not support seeking; cannot rewind for re-enumeration.");
        }
        stream.BaseStream.Seek(0, SeekOrigin.Begin);
    }

    ExportStream cs = new ExportStream(stream, null, fileDescription);

    // If we're reading raw data rows, instantiate a T so we return objects of the
    // type specified by the caller. Otherwise, instantiate a DataRow, which also
    // implements IDataRow.
    IDataRow row = null;
    if (readingRawDataRows)
    {
        row = new T() as IDataRow;
    }
    else
    {
        row = new DataRow();
    }

    AggregatedException ae = new AggregatedException(
        typeof(T).ToString(), fileName, fileDescription.MaximumNbrExceptions);

    try
    {
        bool firstRow = true;
        while (cs.ReadRow(ref row))
        {
            // Skip empty lines. Important: if there is a newline at the end of the
            // last data line, the code thinks there is an empty line after it.
            if ((row.Count == 1) &&
                ((row[0].Value == null) || (string.IsNullOrEmpty(row[0].Value.Trim()))))
            {
                continue;
            }

            // FIX: fm is null when reading raw data rows; the original called
            // fm.CheckValid unconditionally, causing a NullReferenceException
            // for every IDataRow-implementing T.
            if (fm != null)
            {
                fm.CheckValid(row, ae);
            }

            if (firstRow && fileDescription.FirstLineHasColumnNames)
            {
                // Header row: record the column names, don't yield an object.
                if (!readingRawDataRows)
                {
                    fm.ReadNames(row);
                }
            }
            else
            {
                T obj = default(T);
                try
                {
                    if (readingRawDataRows)
                    {
                        obj = row as T;
                    }
                    else
                    {
                        obj = fm.ReadObject(row, ae);
                    }
                }
                catch (FatalFormatException)
                {
                    // FIX: rethrow with 'throw;' to preserve the stack trace
                    // (the original 'throw fex;' reset it).
                    throw;
                }
                catch (AggregatedException)
                {
                    // Maximum number of exceptions must have been reached, so
                    // rethrow. Caught here so we don't add an AggregatedException
                    // to an AggregatedException. ('throw;' keeps the stack trace.)
                    throw;
                }
                catch (Exception ex)
                {
                    // Store the exception in ae: a file with many bad rows then
                    // reports them all at once in one aggregated exception.
                    ae.AddException(ex);
                }

#if DEBUG
                ret.Add(obj);
#else
                yield return (obj);
#endif
            }

            firstRow = false;
        }
    }
    finally
    {
        if (readingFile)
        {
            stream.Close();
        }

        // Exceptions raised while reading were stored in ae. Throwing them to the
        // caller is currently disabled.
        //ae.ThrowIfExceptionsStored();
    }

#if DEBUG
    return (ret);
#endif
}