/// <summary>
/// Create an appropriate reader for an import file, chosen by file extension.
/// </summary>
/// <param name="filename">The filename of the import file.</param>
/// <param name="translation">The translation scheme between the import file and the record, if any.</param>
/// <param name="schema">The xsd schema for the import.</param>
/// <param name="schemaVersion">The version of the xsd schema to look for.</param>
/// <param name="recordType">The type of the records to create.</param>
/// <param name="parameters">Additional parameters for the record.</param>
/// <param name="writeFailedRecord">The function to report failed records with.</param>
/// <returns>The reader for the file, or null when the extension is not recognized.</returns>
public static ImportReader Create(String filename, Dictionary <String, String> translation, Stream schema, Int32 schemaVersion, Type recordType, Dictionary <String, object> parameters, WriteFailedRecordDelegate writeFailedRecord)
{
    String extension = Path.GetExtension(filename);
    ImportReader reader = null;

    // Compare case-insensitively so ".XML", ".Xlsx", etc. are recognized as well;
    // a plain Equals would silently return null for upper-cased extensions.
    if (extension.Equals(".xml", StringComparison.OrdinalIgnoreCase))
    {
        reader = new XmlImportReader(filename, translation, schema, schemaVersion, recordType, parameters, writeFailedRecord);
    }
    else if (extension.Equals(".xlsx", StringComparison.OrdinalIgnoreCase))
    {
        reader = new ExcelImportReader(filename, translation, schema, schemaVersion, recordType, parameters, writeFailedRecord);
    }

    return reader;
}
/// <summary>
/// Producer loop: pull batches of records from the import file and hand them
/// to the consumer through the synchronized queue.
/// </summary>
/// <param name="reader">The reader to use.</param>
/// <param name="queue">The queue to write to.</param>
private void Read(ImportReader reader, SynchronizedQueue <ImportPair> queue)
{
    Array parsed;
    List <Action> failures;
    try
    {
        while (reader.HasRecords && !queue.Empty)
        {
            // Read the batch size under the same lock the consumer takes
            // when it shrinks BulkCount.
            Int32 batchSize;
            lock (queue.SyncRoot)
            {
                batchSize = this.BulkCount;
            }

            reader.GetBatch(batchSize, out parsed, out failures);
            queue.Enqueue(new ImportPair(parsed, failures));
        }
    }
    catch (SynchronizedQueue <ImportPair> .QueueEmptyException)
    {
        // The consumer has shut the queue down - nothing left to do.
    }
    catch (Exception exception)
    {
        // Marshal the failure over to the consumer: enqueue an empty record
        // batch whose single failure action rethrows the captured exception
        // on the consumer's thread.
        Array emptyBatch = Array.CreateInstance(this.RecordType, 0);
        List <Action> rethrow = new List <Action> { () => { throw exception; } };
        queue.Enqueue(new ImportPair(emptyBatch, rethrow));
    }
    finally
    {
        reader.Dispose();
        queue.Empty = true;
    }
}
/// <summary>
/// Import a file. Consumer side of the pipeline: starts the reader thread
/// (see Read), drains the queue batch by batch, pushes records to the
/// importer, and logs every failed record to an XML failure file.
/// </summary>
/// <param name="filename">The file to import from.</param>
private void Start(string filename)
{
    // Running totals: index is the 1-based position of the next record in the
    // source file; failed/succeeded feed the progress and completion events.
    Int64 index = 1;
    Int64 failed = 0;
    Int64 succeeded = 0;

    // Settings for the XML file that collects every record that failed to import.
    XmlWriterSettings settings = new XmlWriterSettings()
    {
        CloseOutput = true,
        ConformanceLevel = ConformanceLevel.Document,
        Indent = true,
        IndentChars = "\t",
        NewLineChars = "\r\n",
        NewLineHandling = NewLineHandling.Replace,
        NewLineOnAttributes = true,
    };
    XmlWriter failure = XmlWriter.Create(this.OpenFailureFile(filename), settings);

    // Bounded queue between the producer thread (Read) and this consumer.
    SynchronizedQueue <ImportPair> queue = new SynchronizedQueue <ImportPair>(DataImporter.CacheSize);
    //DataModel.IsReading = false;
    try
    {
        // Per-record read failures are routed straight into the failure file.
        ImportReader reader = ImportReader.Create(
            filename,
            this.Translation,
            this.Schema,
            this.SchemaVersion,
            this.RecordType,
            this.Parameters,
            (i, r, e) => this.WriteFailedRecord(failure, i, r, e));
        ImportPair records = new ImportPair();

        failure.WriteStartDocument();
        failure.WriteStartElement("FailedRecords");
        failure.WriteAttributeString("xmlns", "xsd", null, "http://www.w3.org/2001/XMLSchema");
        this.maximumErrorCount = this.Translation.Count;
        //this.WriteFailureSchema(failure);

        // Kick off the producer thread that reads batches from the file.
        this.Begin(() => this.Read(reader, queue));

        // Let our listeners know that we're starting the actual upload.
        if (this.EndLoading != null)
        {
            this.EndLoading(this, new InfoEventArgs(reader.Size, this.failureFile));
        }

        // Keep consuming until the producer has flagged the queue finished
        // AND every already-queued batch has been drained.
        while (!queue.Empty || queue.Count != 0)
        {
            Int32 subIndex = 0;
            Dictionary <object, string> errors = null;
            Int32 sentSize;

            records = queue.Dequeue();

            // records.Value holds the failure-report actions for records that
            // could not be translated; run each to log it, and advance the
            // file index past them.
            failed += records.Value.Count;
            index += records.Value.Count;
            foreach (Action failReport in records.Value)
            {
                failReport();
            }

            // Send the successfully translated batch (records.Key) to the importer.
            errors = this.Importer(records.Key, out sentSize);

            // If fewer records were sent than we handed over, shrink the batch
            // size for subsequent reads. NOTE(review): the lock pairs with the
            // BulkCount read in Read() - confirm SyncRoot is the intended gate.
            if (sentSize < records.Key.Length)
            {
                lock (queue.SyncRoot)
                    this.BulkCount = sentSize;
            }

            // Record every server-side rejection in the failure file.
            foreach (object record in errors.Keys)
            {
                List <String> messages = new List <String>() { errors[record] };
                failed += 1;
                this.WriteFailedRecord(failure, index + subIndex, reader.DetranslateRecord(record), messages);
                subIndex += 1;
            }
            succeeded += records.Key.Length - errors.Count;

            // Report progress to listeners.
            if (this.ImportPulse != null)
            {
                this.ImportPulse(this, new ImportEventArgs(reader.Position, succeeded, failed));
            }
            index += records.Key.Length;
        }

        if (this.Success != null)
        {
            this.Success(this, new ImportEventArgs(0, succeeded, failed));
        }
    }
    catch (ThreadAbortException exception)
    {
        // The import thread was aborted - treat as a user cancellation.
        if (this.Failed != null)
        {
            this.Failed(this, new ImportEventArgs(0, succeeded, failed, exception));
        }
        EventLog.Information("Import of {0} aborted by user", filename);
    }
    catch (Exception exception)
    {
        if (this.Failed != null)
        {
            this.Failed(this, new ImportEventArgs(0, succeeded, failed, exception));
        }
        EventLog.Information("Error importing {3}\n{0}: {1}\n{2}", exception.GetType(), exception.Message, exception.StackTrace, filename);
    }
    finally
    {
        // Best-effort cleanup: each step is wrapped so one failure does not
        // prevent the next, and nothing here masks the original exception.
        try
        {
            failure.WriteEndDocument();
            failure.Close();
        }
        catch (Exception exception)
        {
            EventLog.Information("Error closing failure file for import {3}\n{0}: {1}\n{2}", exception.GetType(), exception.Message, exception.StackTrace, filename);
        }

        // If there are no errors, try to remove the failure file (since it's empty anyway).
        try
        {
            if (this.failureFile != null && failed == 0)
            {
                File.Delete(this.failureFile);
            }
        }
        catch (Exception exception)
        {
            EventLog.Information("Error removing failure file for import {3}\n{0}: {1}\n{2}", exception.GetType(), exception.Message, exception.StackTrace, filename);
        }

        // Tell the producer (if still running) that consumption has stopped.
        queue.Empty = true;
    }
    //DataModel.IsReading = true;
}