/// <summary>
/// Initializes the parse result from the full text of a batch extract file:
/// the text is parsed into composite wrappers, which are then loaded via <see cref="AddLines"/>.
/// </summary>
/// <param name="batchFileText">Raw contents of the extract file.</param>
/// <param name="dataObjectMap">Maps record-type codes to their corresponding data object types.</param>
public ExtractParseResult(string batchFileText, Dictionary<string, Type> dataObjectMap)
    : this()
{
    AddLines(CompositeWrapper.Parse(batchFileText, dataObjectMap));
}
/// <summary>
/// Imports an extract file into the database in batches of <paramref name="batchSize"/> lines,
/// committing one transaction per batch. If an unfinished import of the same file exists
/// (same filename, non-null ProcessedLines, not marked Ready), it is resumed instead of
/// starting over; already-committed lines are skipped.
/// </summary>
/// <param name="dataStream">Stream containing the extract file contents.</param>
/// <param name="path">File name used to identify the extract row in the database.</param>
/// <param name="batchSize">Maximum number of lines parsed per batch/transaction.</param>
/// <param name="encoding">Text encoding used to read <paramref name="dataStream"/>.</param>
public static void ImportFileInSteps(Stream dataStream, string path, int batchSize, Encoding encoding)
{
    // PNR's collected across batches; passed into staging-record creation.
    // NOTE(review): on resume, PNR's from batches committed in an earlier run are not
    // re-added here — see the author's TODO below; verify against ToExtractPersonStagings.
    var allPnrs = new List <string>();
    using (var file = new StreamReader(dataStream, encoding))
    {
        // Reused accumulator; cleared (ClearArrays) before each batch.
        var extractResult = new ExtractParseResult();
        long totalReadLinesCount = 0;
        using (var conn = new SqlConnection(CprBroker.Config.Properties.Settings.Default.CprBrokerConnectionString))
        {
            conn.Open();
            using (var dataContext = new ExtractDataContext(conn))
            {
                // Find existing extract or create a new one
                // Latest (by ImportDate) unfinished extract for this filename, if any.
                var extract = dataContext.Extracts.Where(e => e.Filename == path && e.ProcessedLines != null && !e.Ready).OrderByDescending(e => e.ImportDate).FirstOrDefault();
                if (extract == null)
                {
                    // No resumable import found — create a fresh extract row up front so
                    // subsequent batches have an ExtractId to attach child records to.
                    extract = extractResult.ToExtract(path, false, 0);
                    Admin.LogFormattedSuccess("Creating new extract <{0}>", extract.ExtractId);
                    dataContext.Extracts.InsertOnSubmit(extract);
                    dataContext.SubmitChanges();
                }
                else
                {
                    Admin.LogFormattedSuccess("Incomplete extract found <{0}>, resuming", extract.ExtractId);
                }
                // Start reading the file
                while (!file.EndOfStream)
                {
                    // Parse up to batchSize lines from the current stream position.
                    var wrappers = CompositeWrapper.Parse(file, Constants.DataObjectMap, batchSize);
                    var batchReadLinesCount = wrappers.Count;
                    totalReadLinesCount += batchReadLinesCount;
                    Admin.LogFormattedSuccess("Batch read, records found <{0}>, total so far <{1}>", batchReadLinesCount, totalReadLinesCount);
                    // One transaction per batch; shared by the bulk inserts and the
                    // LINQ-to-SQL SubmitChanges below so the batch commits atomically.
                    using (var trans = conn.BeginTransaction(System.Data.IsolationLevel.ReadUncommitted))
                    {
                        dataContext.Transaction = trans;
                        extractResult.ClearArrays();
                        // Lines read so far that a previous (interrupted) run did not commit.
                        // NOTE(review): assumes ProcessedLines is non-null here — true for
                        // resumed extracts (the query filters on it) and presumably for new
                        // ones via ToExtract(path, false, 0); confirm ToExtract sets it to 0.
                        var uninsertedLinesCount = totalReadLinesCount - extract.ProcessedLines.Value;
                        if (uninsertedLinesCount > 0)
                        {
                            // If the previous run stopped mid-batch relative to the current
                            // batch boundaries, drop the leading lines that were already saved.
                            var linesToSkip = wrappers.Count - (int)uninsertedLinesCount;
                            if (linesToSkip > 0)
                            {
                                Admin.LogFormattedSuccess("Unaligned batch sizes, skipping <{0}> lines", linesToSkip);
                                wrappers = wrappers.Skip(linesToSkip).ToList();
                            }
                            extractResult.AddLines(wrappers);
                            // Set start record
                            // Only on the first batch that yields a start wrapper.
                            if (string.IsNullOrEmpty(extract.StartRecord) && extractResult.StartWrapper != null)
                            {
                                extract.StartRecord =
                                    extractResult.StartWrapper.Contents;
                                // NOTE(review): throws if ProductionDate is null — verify the
                                // start wrapper always carries a production date.
                                extract.ExtractDate = extractResult.StartWrapper.ProductionDate.Value;
                            }
                            // Child records
                            conn.BulkInsertAll <ExtractItem>(extractResult.ToExtractItems(extract.ExtractId, Constants.DataObjectMap, Constants.RelationshipMap, Constants.MultiRelationshipMap), trans);
                            conn.BulkInsertAll <ExtractError>(extractResult.ToExtractErrors(extract.ExtractId), trans);
                            // TODO: (Extract) In case some records have been skipped in a previous import attempt, make sure that allPnrs contains their PNR's
                            conn.BulkInsertAll <ExtractPersonStaging>(extractResult.ToExtractPersonStagings(extract.ExtractId, allPnrs), trans);
                            // Update counts
                            extract.ProcessedLines = totalReadLinesCount;
                            // End record and mark as ready
                            if (extractResult.EndLine != null)
                            {
                                extract.EndRecord = extractResult.EndLine.Contents;
                                extract.Ready = true;
                                Admin.LogFormattedSuccess("End record added");
                            }
                            // Persists ProcessedLines (and end-record fields) inside the same
                            // transaction as the bulk inserts above.
                            dataContext.SubmitChanges();
                            Admin.LogFormattedSuccess("Batch committed");
                        }
                        else
                        {
                            // Everything in this batch was already committed by a previous run.
                            Admin.LogFormattedSuccess("Batch already inserted, skipping");
                        }
                        // Committed in both branches; the skip branch commits an empty transaction.
                        trans.Commit();
                    }
                }
            }
        }
    }
}