public void TestTypeWriter_WriteAnonymous()
{
    var people = from id in Enumerable.Range(0, 1)
                 select new
                 {
                     Id = id,
                     Name = "Bob " + id,
                     Created = new DateTime(2013, 1, 19)
                 };
    var mapper = FixedLengthTypeMapper.DefineWriter(people);
    mapper.Property(p => p.Id, 10).ColumnName("id");
    mapper.Property(p => p.Name, 100).ColumnName("name");
    mapper.Property(p => p.Created, 8).ColumnName("created").InputFormat("yyyyMMdd").OutputFormat("yyyyMMdd");

    using (MemoryStream stream = new MemoryStream())
    {
        var options = new FixedLengthOptions() { FillCharacter = '@', RecordSeparator = "\n" };
        mapper.Write(stream, options);
        stream.Position = 0;  // go back to the beginning of the stream

        FixedLengthSchema schema = mapper.GetSchema();
        FlatFileReader reader = new FlatFileReader(new FixedLengthReader(stream, schema, options));
        Assert.IsTrue(reader.Read(), "The writer did not write the entities.");
        int id = reader.GetInt32(0);
        string name = reader.GetString(1);
        DateTime created = reader.GetDateTime(2);
        Assert.AreEqual(people.First().Id, id, "The ID value was not persisted.");
        Assert.AreEqual(people.First().Name, name, "The Name value was not persisted.");
        Assert.AreEqual(people.First().Created, created, "The Created value was not persisted.");
        Assert.IsFalse(reader.Read(), "The writer wrote too many records.");
    }
}
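// Hedged note, not part of the test above: with the column widths (10, 100, 8),
// FillCharacter = '@', and OutputFormat "yyyyMMdd", the single record written to
// the stream would presumably look like this (assuming left-aligned values padded
// on the right with the fill character):
//
//   0@@@@@@@@@Bob 0@@@...@@@20130119
//
// i.e. "0" padded to 10 characters, "Bob 0" padded to 100, then "20130119".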
public ContinentFixture()
{
    Floors = FlatFileReader.Read("Data/continents_1_floors.json.gz")
                           .Concat(FlatFileReader.Read("Data/continents_2_floors.json.gz"))
                           .ToList()
                           .AsReadOnly();
}
public void Test_Blackout()
{
    bool isBlackoutTest = true;
    bool result;

    using (var testPipeline = new Pipeline())
    {
        var reader = new FlatFileReader() { File = this.testDataPath + @"cd2.csv" };
        reader.Formatter = new CsvToDataTableFormatter() { Separator = ";" };
        testPipeline.Commands.Add(reader);

        result = true;
        testPipeline.OnExecutionCanceled += (sender, args) =>
        {
            if (isBlackoutTest)
            {
                if (args.Result == "Blackout")
                {
                    // ok: execution was canceled because of the blackout window
                    result = false;
                }
            }
            else
            {
                // ok: cancellation was not blackout-related
                result = true;
            }
        };

        // set a blackout window around the current time
        testPipeline.BlackoutStart = DateTime.Now.Subtract(new TimeSpan(1, 0, 0));
        testPipeline.BlackoutEnd = DateTime.Now.Add(new TimeSpan(1, 0, 0));
        result = testPipeline.ExecutePipeline();

        // check: the pipeline must not run inside the blackout window
        Assert.IsFalse(result);

        // set the blackout window to a time range entirely in the past
        testPipeline.BlackoutStart = DateTime.Now.Subtract(new TimeSpan(2, 0, 0));
        testPipeline.BlackoutEnd = DateTime.Now.Subtract(new TimeSpan(1, 0, 0));
        result = testPipeline.ExecutePipeline();
    }

    // check: outside the blackout window the pipeline runs normally
    Assert.IsTrue(result);
}
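// Hedged sketch, not the library's actual implementation: the blackout check
// that ExecutePipeline presumably performs before running any commands.
private static bool IsInBlackoutWindow(DateTime now, DateTime blackoutStart, DateTime blackoutEnd)
{
    // Execution is suppressed while "now" falls inside the configured window.
    return now >= blackoutStart && now <= blackoutEnd;
}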
public void SetUp()
{
    using (var writer = new StreamWriter("flatfile.txt"))
    {
        writer.WriteLine("A|B|C|D");
        writer.WriteLine("a0|b0|c0|d0");
        writer.WriteLine("a1|b1|c1|d1");
        writer.WriteLine("a2|b2|c2|d2");
    }

    reader = new FlatFileReader<FlatFileReaderTarget>(StandardModelBinder.Basic(), new StubServiceLocator());
}
public void SetUp()
{
    using (var writer = new StreamWriter("flatfile.txt"))
    {
        writer.WriteLine("A|B|C|D");
        writer.WriteLine("a0|b0|c0|d0");
        writer.WriteLine("a1|b1|c1|d1");
        writer.WriteLine("a2|b2|c2|d2");
    }

    reader = new FlatFileReader<FlatFileReaderTarget>(ObjectResolver.Basic(), new InMemoryServiceLocator());
}
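// Hedged assumption, not shown in the fixtures above: FlatFileReaderTarget is
// presumably a simple binding target whose properties line up with the header
// row of the pipe-delimited file, along these lines:
public class FlatFileReaderTarget
{
    public string A { get; set; }
    public string B { get; set; }
    public string C { get; set; }
    public string D { get; set; }
}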
public void Test_ReadWriteCopyCsv()
{
    using (var testPipeline = new Pipeline())
    {
        var looper = new FileLooper() { SourceDirectory = this.testDataPath, FileFilter = @"*.csv" };
        testPipeline.Commands.Add(looper);

        var reader = new FlatFileReader() { File = "{File}" };
        reader.Formatter = new CsvToDataTableFormatter() { Separator = ";" };
        looper.AddChild(reader);

        reader.AddChild(new TableFilter() { });

        var writer = new FlatFileWriter() { File = this.resultPath + @"pipeline\{FileName}" };
        writer.Formatter = new DataTableToCsvFormatter();
        reader.AddChild(writer);

        looper.AddChild(new FileMover()
        {
            SourceFile = @"{File}",
            TargetDirectory = this.resultPath + @"Archive",
            Mode = FileMover.FileMoveModes.Copy
        });

        looper.AddChild(new FileZipper()
        {
            SourceFile = @"{File}",
            TargetDirectory = this.resultPath + @"Archive\Zipped",
            ZipName = "Archive_{yyyyMMdd}.zip",
            RemoveSourceFile = true
        });

        testPipeline.ExecutePipeline();
    }

    // check
    int sourceFileCount = Directory.GetFiles(this.testDataPath, @"*.csv", SearchOption.TopDirectoryOnly).Length;
    int targetFileCount = Directory.GetFiles(this.resultPath + @"pipeline", @"*.csv", SearchOption.TopDirectoryOnly).Length;
    int archiveFileCount = Directory.GetFiles(this.resultPath + @"Archive", @"*.csv", SearchOption.TopDirectoryOnly).Length;
    int zipFileCount = Directory.GetFiles(this.resultPath + @"Archive\Zipped", @"Archive_*.zip", SearchOption.TopDirectoryOnly).Length;

    Assert.AreEqual(sourceFileCount, targetFileCount);
    Assert.AreEqual(0, archiveFileCount);
    Assert.AreEqual(1, zipFileCount);
}
public void SetUp()
{
    using (var writer = new StreamWriter("flatfile.txt"))
    {
        writer.WriteLine("A|B|C|D");
        writer.WriteLine("a0|b0|c0|d0");
        writer.WriteLine("a1|b1|c1|d1");
        writer.WriteLine("a2|b2|c2|d2");
    }

    var container = new Container(new AppSettingProviderRegistry());
    reader = container.GetInstance<FlatFileReader<FlatFileReaderTarget>>();
}
public void Test_ReadCsv_WriteCsv_Cd()
{
    using (var testPipeline = new Pipeline())
    {
        var reader = new FlatFileReader() { File = this.testDataPath + @"cd2.csv" };
        reader.Formatter = new CsvToDataTableFormatter() { Separator = ";" };
        testPipeline.Commands.Add(reader);

        var writer = new FlatFileWriter() { File = this.resultPath + @"cd2_copy.csv" };
        writer.Formatter = new DataTableToCsvFormatter() { Separator = ";" };
        reader.AddChild(writer);

        //testPipeline.CommandHook = (cmd) =>
        //{
        //};
        testPipeline.OnExecuteCommand += (cmd) => { };

        testPipeline.ExecutePipeline();
    }

    // check
    var sourceLineCount = File.ReadLines(this.testDataPath + @"cd2.csv").Count();
    var targetLineCount = File.ReadLines(this.resultPath + @"cd2_copy.csv").Count();
    Assert.AreEqual(sourceLineCount, targetLineCount);

    if (!FileUtil.CompareFiles(this.testDataPath + "cd2.csv", this.resultPath + "cd2_copy.csv"))
    {
        throw new Exception("Original and copied file do not match");
    }
}
public void ShouldCreateCustomerFromCsv()
{
    Schema schema = new Schema()
        .AddColumn<string>("name")
        .AddColumn<DateTime>("modified")
        .AddColumn<int>("visits");
    Mapper<Customer> mapper = new Mapper<Customer>()
        .Map(c => c.Name).To("name")
        .Map(c => c.LastModified).To("modified")
        .Map(c => c.Visits).To("visits");

    const string data = @"name,modified,visits
bob,12/31/2012,108";
    Stream stream = new MemoryStream(Encoding.Default.GetBytes(data));

    SeparatedValueParserOptions options = new SeparatedValueParserOptions() { IsFirstRecordSchema = true };
    SeparatedValueParser parser = new SeparatedValueParser(stream, schema, options);
    FlatFileReader reader = new FlatFileReader(parser);
    IEnumerable<Customer> customers = mapper.Extract(reader);

    Assert.AreEqual(1, customers.Count(), "The wrong number of records were mapped.");
    Assert.AreEqual("bob", customers.First().Name, "The customer name was not parsed correctly.");
    Assert.AreEqual(new DateTime(2012, 12, 31), customers.First().LastModified, "The customer modified date was not parsed correctly.");
    Assert.AreEqual(108, customers.First().Visits, "The customer visits was not parsed correctly.");
}
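// Hedged assumption: the Customer type used by this test is not shown in the
// listing; judging from the mapping it presumably looks like this:
public class Customer
{
    public string Name { get; set; }
    public DateTime LastModified { get; set; }
    public int Visits { get; set; }
}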
public ForexRateInserter(ImportForexParam param, Stream stream, XpoFieldMapper xpoFieldMapper)
{
    if (param == null)
    {
        throw new UserFriendlyException("Param cannot be null");
    }
    if (stream == null)
    {
        throw new UserFriendlyException("Stream cannot be null");
    }
    if (xpoFieldMapper == null)
    {
        throw new UserFriendlyException("XpoFieldMapper cannot be null");
    }

    // reader must be instantiated on main thread or you get null exception
    var reader = new FlatFileReader(stream, Encoding.GetEncoding("iso-8859-1"));
}
public void Test_ReadCsv_WriteFixedLength_Cd2()
{
    using (var testPipeline = new Pipeline())
    {
        var looper = new FileLooper() { SourceDirectory = this.testDataPath, FileFilter = @"cd2.csv" };
        testPipeline.Commands.Add(looper);

        var reader = new FlatFileReader() { File = "{File}" };
        reader.Formatter = new CsvToDataTableFormatter() { Separator = ";", Enclosure = "\"" };
        looper.AddChild(reader);

        var formatter = new DataTableToFixedLengthFormatter();
        formatter.FieldDefinitions.Add(new FieldDefinition(new Field("name", 15)));
        formatter.FieldDefinitions.Add(new FieldDefinition(new Field("addr", 25)));
        formatter.FieldDefinitions.Add(new FieldDefinition(new Field("telefon", 10)));

        var writer = new FlatFileWriter() { File = this.resultPath + @"{FileName}" };
        writer.Formatter = formatter;
        reader.AddChild(writer);

        testPipeline.ExecutePipeline();
    }

    // check
    var sourceLineCount = File.ReadLines(this.testDataPath + @"cd2.csv").Count();
    var targetLineCount = File.ReadLines(this.resultPath + @"cd2.csv").Count();
    Assert.AreEqual(sourceLineCount, targetLineCount);
}
public void Test_TableFilter()
{
    using (var testPipeline = new Pipeline())
    {
        var reader = new FlatFileReader() { File = this.testDataPath + @"cd2.csv" };
        reader.Formatter = new CsvToDataTableFormatter() { Separator = ";", Enclosure = "\"" };
        testPipeline.Commands.Add(reader);

        var filter = new TableFilter();
        filter.FilterConditions.Add(new Condition() { Token = "name", Operator = ConditionOperators.Contains, Value = "restaur" });
        reader.AddChild(filter);

        var writer = new FlatFileWriter() { File = this.resultPath + "{File}" };
        writer.Formatter = new DataTableToCsvFormatter() { Separator = ";" };
        reader.AddChild(writer);

        var result = testPipeline.ValidatePipeline();
        if (result.Any())
        {
            throw new ArgumentException(string.Join(Environment.NewLine, result));
        }

        testPipeline.ExecutePipeline();
    }
}
public void Test_SqlTableImport()
{
    using (var testPipeline = new Pipeline() { StreamingBlockSize = 20 })
    {
        var looper = new FileLooper() { SourceDirectory = this.resultPath, FileFilter = "mis.*.txt" };
        testPipeline.Commands.Add(looper);

        var reader = new FlatFileReader()
        {
            File = "{File}",
            Formatter = new CsvToDataTableFormatter() { Separator = ";" }
        };
        looper.AddChild(reader);

        var tableWriter = new DbTableWriter
        {
            ConnectionInfo = new OracleNativeDbConnectionInfo()
            {
                UserName = "",
                Password = "",
                Database = "ORACLE01",
                Host = "COMPUTER01"
            },
            DeleteBefore = false,
            TableName = "{DataName}_bak"
        };
        reader.AddChild(tableWriter);

        testPipeline.ExecutePipeline();
    }
}
public void TestTypeWriter_AnonymousType()
{
    var people = from id in Enumerable.Range(0, 1)
                 select new
                 {
                     Id = id,
                     Name = "Bob " + id,
                     Created = new DateTime(2013, 1, 19)
                 };
    var mapper = SeparatedValueTypeMapper.DefineWriter(people);
    mapper.Property(p => p.Id).ColumnName("id");
    mapper.Property(p => p.Name).ColumnName("name");
    mapper.Property(p => p.Created).ColumnName("created").InputFormat("yyyyMMdd").OutputFormat("yyyyMMdd");

    using (MemoryStream stream = new MemoryStream())
    {
        var options = new SeparatedValueOptions() { IsFirstRecordSchema = true, Separator = "\t" };
        mapper.Write(stream, options);
        stream.Position = 0;  // go back to the beginning of the stream

        SeparatedValueSchema schema = mapper.GetSchema();
        FlatFileReader reader = new FlatFileReader(new SeparatedValueReader(stream, schema, options));
        Assert.IsTrue(reader.Read(), "The writer did not write the entities.");
        int id = reader.GetInt32(0);
        string name = reader.GetString(1);
        DateTime created = reader.GetDateTime(2);
        Assert.AreEqual(people.First().Id, id, "The ID value was not persisted.");
        Assert.AreEqual(people.First().Name, name, "The Name value was not persisted.");
        Assert.AreEqual(people.First().Created, created, "The Created value was not persisted.");
        Assert.IsFalse(reader.Read(), "The writer wrote too many records.");
    }
}
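// Hedged note, not part of the test above: with IsFirstRecordSchema = true,
// Separator = "\t", and OutputFormat "yyyyMMdd", the stream written here would
// presumably contain these two tab-separated lines:
//
//   id<TAB>name<TAB>created
//   0<TAB>Bob 0<TAB>20130119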
public void Test_ReadFixedLength_WriteCsv_Fixed2()
{
    using (var testPipeline = new Pipeline())
    {
        var looper = new FileLooper() { SourceDirectory = this.testDataPath, FileFilter = @"FixedText2.txt" };
        testPipeline.Commands.Add(looper);

        var formatter = new FixedLengthToDataTableFormatter();
        formatter.FieldDefinitions.Add(new FieldDefinition(new Field("Header1", 15)));
        formatter.FieldDefinitions.Add(new FieldDefinition(new Field("Header2", 25)));
        formatter.FieldDefinitions.Add(new FieldDefinition(new Field("Header3", 10)));

        var reader = new FlatFileReader() { File = "{File}", Formatter = formatter };
        looper.AddChild(reader);

        var writer = new FlatFileWriter() { File = this.resultPath + @"{FileName}" };
        writer.Formatter = new DataTableToCsvFormatter();
        reader.AddChild(writer);

        testPipeline.ExecutePipeline();
    }

    // check
    var sourceLineCount = File.ReadLines(this.testDataPath + @"FixedText2.txt").Count();
    var targetLineCount = File.ReadLines(this.resultPath + @"FixedText2.txt").Count();
    Assert.AreEqual(sourceLineCount, targetLineCount);
}
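// Hedged note: given the field definitions above, each input line is presumably
// sliced by character position rather than by delimiter:
//
//   Header1 = chars 0-14  (width 15)
//   Header2 = chars 15-39 (width 25)
//   Header3 = chars 40-49 (width 10)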
public SkinFixture()
{
    Skins = FlatFileReader.Read("Data/skins.json.gz").ToList().AsReadOnly();
}
public AchievementFixture()
{
    Achievements = FlatFileReader.Read("Data/achievements.json.gz").ToList().AsReadOnly();
}
public virtual void ProcessSingleFile(string inboundDirectoryName, string inboundFileName)
{
    log.Info("Start inbound file " + inboundFileName);
    FlatFileReader reader = new FlatFileReader(inboundFileName, Encoding.ASCII, "\t");
    try
    {
        OrderHead orderHead = null;
        OrderDetail orderDetail = null;
        string shiftCode = string.Empty;
        Hu hu = null;

        string[] fields = reader.ReadLine();
        while (fields != null)
        {
            string prodLine = fields[0];
            string itemCode = fields[1];
            string huId = fields[2];
            decimal qty = decimal.Parse(fields[3]);
            string itemHuId = fields[4];
            string onlineDate = fields[5];
            string onlineTime = fields[6];
            string offlineDate = fields[7];
            string offlineTime = fields[8];
            string customerCode = fields[9];
            string customerLoc = fields[10];

            if (orderHead == null)
            {
                #region Find the matching work order
                shiftCode = BarcodeHelper.GetShiftCode(huId);
                DetachedCriteria criteria = DetachedCriteria.For<OrderHead>();
                criteria.CreateAlias("Flow", "f");
                //criteria.CreateAlias("Shift", "s");
                criteria.Add(Expression.Like("f.Code", prodLine, MatchMode.End));
                criteria.Add(Expression.Eq("s.Code", shiftCode));
                criteria.Add(Expression.Eq("Status", BusinessConstants.CODE_MASTER_STATUS_VALUE_INPROCESS));
                criteria.AddOrder(Order.Asc("StartTime"));
                IList<OrderHead> orderHeadList = this.criteriaMgr.FindAll<OrderHead>(criteria);
                #endregion

                if (orderHeadList != null && orderHeadList.Count > 0)
                {
                    foreach (OrderHead targetOrderHead in orderHeadList)
                    {
                        orderHead = targetOrderHead;

                        #region Find the matching work order detail
                        IList<OrderDetail> orderDetailList = orderHead.OrderDetails;
                        foreach (OrderDetail targetOrderDetail in orderDetailList)
                        {
                            if (targetOrderDetail.Item.Code == itemCode)
                            {
                                log.Info("Find match wo " + orderHead.OrderNo);
                                orderDetail = targetOrderDetail;
                                orderDetail.CurrentReceiveQty = qty;
                                break;
                            }
                        }
                        #endregion

                        if (orderDetail != null)
                        {
                            break;
                        }
                    }
                }
                else
                {
                    throw new BusinessErrorException("No active wo found for prodline " + prodLine + ", shift " + shiftCode);
                }

                if (orderDetail != null)
                {
                    #region Create the outer packaging barcode
                    if (this.huMgr.LoadHu(huId) == null)
                    {
                        log.Info("Insert hu " + huId + " into database.");
                        hu = ResolveAndCreateHu(huId, orderDetail, qty);
                        orderDetail.HuId = hu.HuId;

                        Receipt receipt = new Receipt();
                        ReceiptDetail receiptDetail = new ReceiptDetail();
                        receiptDetail.OrderLocationTransaction = this.orderLocationTransactionMgr.GetOrderLocationTransaction(orderDetail.Id, BusinessConstants.IO_TYPE_IN)[0];
                        receiptDetail.HuId = hu.HuId;
                        receiptDetail.ReceivedQty = qty;
                        receiptDetail.Receipt = receipt;
                        receiptDetail.LotNo = hu.LotNo;

                        #region Find the Out OrderLocationTransactions and populate the MaterialFlushBack
                        IList<OrderLocationTransaction> orderLocTransList = this.orderLocationTransactionMgr.GetOrderLocationTransaction(orderDetail.Id, BusinessConstants.IO_TYPE_OUT);
                        foreach (OrderLocationTransaction orderLocTrans in orderLocTransList)
                        {
                            MaterialFlushBack material = new MaterialFlushBack();
                            material.OrderLocationTransaction = orderLocTrans;
                            if (orderLocTrans.UnitQty != 0)
                            {
                                material.Qty = qty;
                            }
                            receiptDetail.AddMaterialFlushBack(material);
                        }
                        #endregion

                        receipt.AddReceiptDetail(receiptDetail);
                        this.orderManager.ReceiveOrder(receipt, this.userMgr.GetMonitorUser());
                    }
                    else
                    {
                        throw new BusinessErrorException("Hu " + huId + " already exists in database.");
                    }
                    #endregion
                }
                else
                {
                    throw new BusinessErrorException("No item found for item code " + itemCode + " for prodline " + prodLine + ", shift " + shiftCode);
                }
            }

            #region Create the inner packaging barcode
            if (this.huMgr.LoadHu(itemHuId) == null)
            {
                log.Info("Insert hu " + itemHuId + " into database.");
                CreateItemHu(itemHuId, orderDetail, hu.LotNo, hu.ManufactureDate);
            }
            else
            {
                throw new BusinessErrorException("Hu " + itemHuId + " already exists in database.");
            }
            #endregion

            fields = reader.ReadLine();
        }
    }
    finally
    {
        reader.Dispose();
    }
}
public ItemPriceFixture()
{
    ItemPrices = FlatFileReader.Read("Data/prices.json.gz").ToList().AsReadOnly();
}
public static EDIValidationResult ValidateEdi(string ediData, string ediFileName, string schemaFileName, string certFileDisplayName, string type, SpecCertFileType fileType, IDocumentPlug documentPlug, out IFatpipeDocument fatpipeDocument)
{
    Stopwatch sw = new Stopwatch();
    sw.Start();

    EDIValidationResult ediValidationResult = new EDIValidationResult()
    {
        FileName = ediFileName,
        SchemaName = schemaFileName,
        SegmentValidationResults = new List<SegmentValidationResult>(),
        TransactionNumbers = new List<string>(),
        DisplayName = certFileDisplayName,
        Type = type,
    };
    fatpipeDocument = null;

    try
    {
        ediValidationResult.SegmentValidationResults.Clear();
        string endHeader = string.Empty;

        switch (fileType)
        {
            case SpecCertFileType.X12:
                EDIReader ediReader = new EDIReader();
                if (ediReader.Initialize(new MemoryStream(Encoding.UTF8.GetBytes(ediData)), null, documentPlug) == true)
                {
                    try
                    {
                        IFatpipeDocument currentFatpipeDocument = null;
                        while ((currentFatpipeDocument = ediReader.GetNextTransactionSet()) != null)
                        {
                            if (string.IsNullOrWhiteSpace(ediValidationResult.BeautifiedOriginalPayload))
                            {
                                ediValidationResult.BeautifiedOriginalPayload = currentFatpipeDocument.BeautifiedOriginalPayloadStartHeader;
                                endHeader = currentFatpipeDocument.BeautifiedOriginalPayloadEndHeader;
                            }

                            ediValidationResult.BeautifiedOriginalPayload += currentFatpipeDocument.BeautifiedOriginalPayloadBody;
                            ediValidationResult.TransactionNumbers.Add(currentFatpipeDocument.TransactionNumber);
                            ediValidationResult.SegmentValidationResults.AddRange(ediReader.Errors.GetSegmentValidationResults());
                            fatpipeDocument = currentFatpipeDocument;
                        }

                        ediValidationResult.BeautifiedOriginalPayload += endHeader;
                    }
                    catch (EDIReaderException ediReaderException)
                    {
                        // Add whatever errors we accumulated
                        ediValidationResult.SegmentValidationResults.AddRange(ediReader.Errors.GetSegmentValidationResults());
                        ediValidationResult.SegmentValidationResults.Add(
                            new SegmentValidationResult()
                            {
                                Type = ResultType.Error,
                                SequenceNumber = -1,
                                Name = "N/A",
                                Description = ediReaderException.Message,
                                StartIndex = -1,
                                EndIndex = -1,
                            });
                    }
                }
                break;

            case SpecCertFileType.FlatFile:
                FlatFileReader flatFileReader = new FlatFileReader();
                fatpipeDocument = flatFileReader.ReadFile(new MemoryStream(Encoding.UTF8.GetBytes(ediData)), documentPlug);
                ediValidationResult.BeautifiedOriginalPayload = fatpipeDocument.BeautifiedOriginalPayloadBody;
                ediValidationResult.SegmentValidationResults.AddRange(flatFileReader.Errors.GetSegmentValidationResults());
                break;

            case SpecCertFileType.Xml:
                XmlFileReader xmlFileReader = new XmlFileReader();
                fatpipeDocument = xmlFileReader.ReadFile(new MemoryStream(Encoding.UTF8.GetBytes(ediData)), documentPlug);
                ediValidationResult.BeautifiedOriginalPayload = fatpipeDocument.BeautifiedOriginalPayloadBody;
                ediValidationResult.SegmentValidationResults.AddRange(xmlFileReader.Errors.GetSegmentValidationResults());
                break;

            default:
                ediValidationResult.SegmentValidationResults.Add(
                    new SegmentValidationResult()
                    {
                        Type = ResultType.Error,
                        SequenceNumber = -1,
                        Name = "N/A",
                        Description = "Invalid cert file type (only EDI and FlatFile are supported)",
                        StartIndex = -1,
                        EndIndex = -1,
                    });
                break;
        }
    }
    catch (Exception ex)
    {
        ediValidationResult.SegmentValidationResults.Add(
            new SegmentValidationResult()
            {
                Type = ResultType.Error,
                SequenceNumber = -1,
                Name = "N/A",
                Description = "Internal error occurred. " + ex.ToString(),
                StartIndex = -1,
                EndIndex = -1,
            });
    }

    sw.Stop();
    ediValidationResult.ExecutionTime = sw.Elapsed;
    return ediValidationResult;
}
public virtual void ProcessInboundFile(DssInboundControl dssInboundControl, string[] files)
{
    logLoadFile.Info("Start process inbound ");

    // resubmit the data
    #region DataReader
    foreach (string fileName in files)
    {
        try
        {
            IList<DssImportHistory> dssImportHistoryList = new List<DssImportHistory>();

            #region Read the file
            logLoadFile.Info("Start load file " + fileName);
            FlatFileReader reader = null;
            try
            {
                DssImportHistory dssImportHistory = new DssImportHistory();
                dssImportHistory.DssInboundCtrl = dssInboundControl;
                dssImportHistory.IsActive = true;
                dssImportHistory.KeyCode = Path.GetFileNameWithoutExtension(fileName);
                dssImportHistory.CreateDate = DateTime.Now;

                reader = this.DataReader(fileName, Encoding.GetEncoding(dssInboundControl.FileEncoding), "|");
                for (string[] lineData = reader.ReadLine(); lineData != null; lineData = reader.ReadLine())
                {
                    this.FillDssImportHistory(lineData, dssImportHistory);
                    if (dssImportHistory[0] == "0")
                    {
                        dssImportHistory.EventCode = BusinessConstants.DSS_EVENT_CODE_DELETE;
                        DssHelper.FormatDeleteData(lineData, BusinessConstants.DSS_SYSTEM_CODE_QAD); // strip the quotes from QAD delete records
                    }
                    else
                    {
                        dssImportHistory.EventCode = BusinessConstants.DSS_EVENT_CODE_CREATE;
                    }
                }
                dssImportHistoryList.Add(dssImportHistory);
            }
            catch (Exception ex)
            {
                logLoadFile.Error("Process inbound file: " + fileName + " Error.", ex);
                throw; // rethrow without resetting the stack trace
            }
            finally
            {
                if (reader != null) // reader may be null if DataReader itself failed
                {
                    reader.Dispose();
                }
                logLoadFile.Info("Process inbound file: " + fileName + " finished.");
            }
            logLoadFile.Info("End load file " + fileName);
            #endregion

            #region CreateDssImportHistory
            logLoadFile.Info("Start save file " + fileName);
            CreateDssImportHistory(dssInboundControl, dssImportHistoryList, files);
            logLoadFile.Info("End save file " + fileName);
            #endregion

            #region Archive download file
            try
            {
                logLoadFile.Info("Start backup file " + fileName);
                ArchiveFile(new string[] { fileName }, dssInboundControl.ArchiveFloder);
                logLoadFile.Info("End backup file " + fileName);
            }
            catch (Exception ex)
            {
                logLoadFile.Error("Archive download file error:", ex);
            }
            #endregion
        }
        catch (Exception ex)
        {
            logLoadFile.Error("Create DssImportHistory error:", ex);
        }
    }
    #endregion
}
public ItemFixture()
{
    Items = FlatFileReader.Read("Data/items.json.gz").ToList().AsReadOnly();
}
public RecipeFixture()
{
    Recipes = FlatFileReader.Read("Data/recipes.json.gz").ToList().AsReadOnly();
}
public OrderBookFixture()
{
    ItemPrices = FlatFileReader.Read("Data/listings.json.gz").ToList().AsReadOnly();
}