/// <summary>
/// Creates a new parser reading records from the given <see cref="IXLWorksheet"/>
/// using the supplied <see cref="CsvConfiguration"/>.
/// </summary>
/// <param name="worksheet">The <see cref="IXLWorksheet"/> with the data.</param>
/// <param name="configuration">The configuration.</param>
public ExcelParser(IXLWorksheet worksheet, CsvConfiguration configuration)
{
    this.worksheet = worksheet;
    this.configuration = configuration;
    workbook = worksheet.Workbook;

    // Field count is the right-most used column over all used rows.
    FieldCount = worksheet.RowsUsed().CellsUsed().Max(cell => cell.Address.ColumnNumber);
}
/// <summary>
/// Initializes the snapshot with defaults: a "Default" field setup, a lenient
/// semicolon-delimited CSV configuration, and the selectable number/date/time
/// format strings offered to the user per column type.
/// </summary>
public CsvImporterDataSnapshot()
{
    this.PathCsv = "C:\\";
    this.FieldSetupCurrentName = "Default";
    this.FieldSetupsByName = new Dictionary<string, FieldSetup>();
    this.FieldSetupsByName.Add(this.FieldSetupCurrentName, new FieldSetup(this.FieldSetupCurrentName));

    this.CsvConfiguration = new CsvConfiguration();
    this.CsvConfiguration.Delimiter = ";";
    this.CsvConfiguration.AllowComments = true;
    this.CsvConfiguration.TrimFields = true;

    this.CsvFieldTypeFormatsAvailable = new Dictionary<string, List<string>>();
    // Numeric formats: decimal-point vs decimal-comma conventions.
    this.CsvFieldTypeFormatsAvailable.Add("Open,High,Low,Close,Volume", new List<string>() { "#,###.##", "#.###,##" });
    // Format reference: http://www.csharp-examples.net/string-format-datetime/
    // http://msdn.microsoft.com/en-us/library/8kb3ddd4.aspx http://msdn.microsoft.com/en-us/library/az4se3k1.aspx
    this.CsvFieldTypeFormatsAvailable.Add("Date", new List<string>() { CsvTypeParser.FORMAT_TRY_ALL, "d", "D", "yyyyMMdd", "yyyy-MM-dd", "yyyy-MMM-dd", "d/M/yyyy", "dd/MM/yyyy", "dd/MM/yy", "MM-dd-yy" });
    this.CsvFieldTypeFormatsAvailable["Date"].Sort();
    // FIX: the original listed "h:mm tt" twice; the duplicate entry is removed so it
    // does not appear twice in the (sorted) picker list.
    this.CsvFieldTypeFormatsAvailable.Add("Time", new List<string>() { CsvTypeParser.FORMAT_TRY_ALL, "t", "T", "HHmmss", "hmmss", "h:mm tt", "h:mm:ss tt", "HH':'mm':'ss 'GMT'", "'T'HH':'mm':'ss", "HH':'mm':'ss'Z'", "THHmmssZ" });
    this.CsvFieldTypeFormatsAvailable["Time"].Sort();
}
public void CanQuoteAllFields()
{
    // Configure single-quote quoting with trimming and multiline support.
    var output = new StringBuilder();
    var configuration = new CsvConfiguration
    {
        QuoteAllFields = true,
        Quote = '\'',
        Delimiter = ';',
        Escape = '"',
        TrimValues = true,
        SupportsMultiline = true
    };

    var fields = new string[] { "1234", " abc ", " def", "ghi ", "jk\rl\nmno", "pqrs" };
    using (var writer = new CsvWriter(new StringWriter(output), configuration))
    {
        writer.WriteFields(fields);
        writer.NextRow();
    }

    // Every field is trimmed and wrapped in single quotes; the row ends with CRLF.
    string expected = "'1234';'abc';'def';'ghi';'jk\rl\nmno';'pqrs'" + "\r\n";
    Assert.AreEqual(expected, output.ToString());
}
/// <summary>
/// Builds a CSV string for the data, using the explicit column structure
/// when one has been provided, otherwise inferring one from the data.
/// </summary>
internal string CreateCsv(IEnumerable<dynamic> data, CsvConfiguration configuration)
{
    bool hasStructure = Structure != null && Structure.Count > 0;
    return hasStructure
        ? createCsvWithStructure(data, configuration)
        : createCsvWithoutStructure(data, configuration);
}
/// <summary>
/// Creates a reader over the CSV file at <paramref name="path"/> for the given domain.
/// </summary>
public CsvReader(string path, CsvConfiguration config, Domain domain, bool isTestReader)
{
    Path = path;
    Domain = domain;
    IsTestReader = isTestReader;
    // NOTE(review): the StreamReader opened here is handed to CsvHelper;
    // confirm it is disposed elsewhere in this class's lifecycle.
    _reader = new CsvHelper.CsvReader(File.OpenText(this.Path), config);
}
/// <summary>
/// Loads the headerless DbIp city CSV into a DataTable and bulk-inserts it.
/// </summary>
private static void Main(string[] args)
{
    var configuration = new CsvConfiguration() { HasHeaderRecord = false };
    var reader = new CsvReader(new StreamReader(DbIpCityFile), configuration);
    var records = reader.GetRecords<DbIpCity>();

    var table = DatabaseManager.GetDataTable();
    foreach (var record in records)
    {
        var row = table.NewRow();
        row["Ip_Start"] = record.IpStart;
        row["Ip_End"] = record.IpEnd;
        row["city"] = record.City;
        row["region"] = record.Region;
        row["country"] = record.Country;
        row["type"] = record.Type;
        table.Rows.Add(row);
    }

    DatabaseManager.BulkInsert(ConnString, table);
}
public void TestMovieLensSingle()
{
    // Dataset: "::"-delimited MovieLens 1M train/test split files.
    var csvConfig = new CsvConfiguration() { Delimiter = "::", HasHeaderRecord = true };

    // load data
    var trainReader = new CsvReader(Paths.MovieLens1MTrain75, csvConfig);
    var testReader = new CsvReader(Paths.MovieLens1MTest25, csvConfig, true);
    var container = new DataContainer();
    trainReader.LoadData(container);
    testReader.LoadData(container);

    var startTime = DateTime.Now;
    var splitter = new RatingSimpleSplitter(container);
    //var recommender = new MediaLiteRatingPredictor(new MatrixFactorization());
    var recommender = new LibFmTrainTester(libFmPath: "LibFm.Net.64.exe");

    // evaluation: run the RMSE evaluator over the split.
    var evalContext = new EvalutationContext<ItemRating>(recommender, splitter);
    var pipeline = new EvaluationPipeline<ItemRating>(evalContext);
    pipeline.Evaluators.Add(new RMSE());
    pipeline.Run();

    var duration = (int)DateTime.Now.Subtract(startTime).TotalMilliseconds;
    Console.WriteLine("RMSE\tDuration\n{0}\t{1}", evalContext["RMSE"], duration);
}
/// <summary>
/// Reads fluorophore values from a headerless CSV file, flattening every record's
/// float values into one array.
/// </summary>
/// <param name="fileName">Path of the CSV file to read.</param>
/// <returns>All parsed float values in file order.</returns>
/// <exception cref="FileNotFoundException">Thrown when the file does not exist.</exception>
public static float[] ReadFluorophores(String fileName)
{
    if (!File.Exists(fileName))
    {
        // FIX: the original passed the path as the exception *message*
        // (FileNotFoundException(string message)), leaving FileName null.
        // Use the (message, fileName) overload so the path lands in FileName.
        throw new FileNotFoundException("Fluorophore file not found.", fileName);
    }

    CsvConfiguration configuration = new CsvConfiguration();
    configuration.HasHeaderRecord = false;
    configuration.TrimFields = true;

    List<float> fluorophores = new List<float>();
    using (CsvReader reader = new CsvReader(new StreamReader(fileName), configuration))
    {
        IEnumerable<DataRecord> dataRecords = reader.GetRecords<DataRecord>();
        foreach (DataRecord dataRecord in dataRecords.ToList())
        {
            fluorophores.AddRange(dataRecord.ToFloat());
        }
    }
    return fluorophores.ToArray();
}
/// <summary>
/// Loads all race registration CSVs plus checkpoint locations, computes the race
/// data, and writes the JSON output.
/// FIX: the identical configuration/reader/GetRecords sequence was copy-pasted four
/// times and none of the readers were disposed; it is factored into ReadRecords.
/// </summary>
static void Main(string[] args)
{
    List<TeamRegistration> teams = ReadRecords<TeamRegistration>(TeamsFilename);
    List<CheckpointTimeRegistration> checkpointTimes = ReadRecords<CheckpointTimeRegistration>(CheckpointTimesFilename);
    List<TeamScoreRegistration> teamScores = ReadRecords<TeamScoreRegistration>(TeamScoresFilename);
    List<CheckpointRegistration> checkpoints = ReadRecords<CheckpointRegistration>(CheckpointsFilename);

    CheckpointLocationsLoader locationsLoader = new CheckpointLocationsLoader();
    List<CheckpointLocation> checkpointLocations = locationsLoader.LoadCheckpointsFromKML(Path.Combine(BaseDir, CheckpointLocationsFilename));

    RaceDataCalculator converter = new RaceDataCalculator();
    RaceData indecies = converter.CalcuateData(teams, checkpointTimes, teamScores, checkpoints, checkpointLocations);

    JSONDataGenerator generator = new JSONDataGenerator();
    generator.Write(Path.Combine(OutputDir, JSONDataFilename), checkpointLocations, indecies);
}

/// <summary>
/// Reads all records of <typeparamref name="T"/> from a semicolon-delimited,
/// Windows-1252 encoded CSV file under BaseDir, disposing the reader afterwards.
/// </summary>
private static List<T> ReadRecords<T>(string filename) where T : class
{
    CsvConfiguration cfg = new CsvConfiguration { Delimiter = ";", IsStrictMode = false };
    using (CsvReader reader = new CsvReader(new StreamReader(Path.Combine(BaseDir, filename), Encoding.GetEncoding(1252)), cfg))
    {
        return reader.GetRecords<T>().ToList();
    }
}
/// <summary>
/// Initializes the writer with an invariant-culture CSV configuration so output
/// does not depend on the machine's locale.
/// </summary>
public CsvDataSetWriter()
{
    config = new CsvConfiguration
    {
        CultureInfo = CultureInfo.InvariantCulture
    };
}
/// <summary>
/// Reads the CSV file at <paramref name="filePath"/> into header + row arrays.
/// FIX: the CsvReader (and its underlying StreamReader/file handle) was never
/// disposed; it is now wrapped in a using block.
/// </summary>
/// <param name="filePath">Path of the CSV file to parse.</param>
/// <param name="config">Upload configuration (currently unread here).</param>
internal static CsvData ParseFileAndGetCsvData(string filePath, UploadConfig config)
{
    CsvConfiguration configuration = new CsvConfiguration();
    configuration.HasHeaderRecord = true;

    string[] header = default(string[]);
    List<string[]> rows = new List<string[]>();

    using (CsvReader csvReader = new CsvReader(new StreamReader(filePath), configuration))
    {
        while (csvReader.Read())
        {
            header = csvReader.FieldHeaders;
            string[] row = new string[header.Length];
            for (int j = 0; j < header.Length; j++)
            {
                row[j] = csvReader.GetField(j);
            }
            rows.Add(row);
        }
    }

    return new CsvData { Header = header, Rows = rows };
}
/// <summary>
/// Serializes the data to CSV using the declared column Structure: a header row of
/// column names followed by one row per datum, formatting each value with the
/// column's Format string when one is set.
/// FIX: the CsvWriter (IDisposable) was never disposed; it is now in a using block.
/// </summary>
internal string createCsvWithStructure(IEnumerable<dynamic> data, CsvConfiguration configuration)
{
    var guid = Guid.NewGuid().ToString();
    var builder = new StringBuilder();
    using (var writer = new CsvHelper.CsvWriter(new StringWriter(builder), configuration))
    {
        // Header row from the declared structure.
        foreach (var column in Structure)
        {
            writer.WriteField(column.Name);
        }
        writer.NextRecord();

        foreach (var datum in data)
        {
            var dict = ToDictionary(guid, datum);
            foreach (var column in Structure)
            {
                var value = dict[column.Name];
                var str = string.IsNullOrEmpty(column.Format) ? value.ToString() : value.ToString(column.Format);
                writer.WriteField(str);
            }
            writer.NextRecord();
        }
    }
    return builder.ToString();
}
/// <summary>
/// Creates a writer that emits CSV to <paramref name="writer"/> using the given
/// <paramref name="configuration"/>.
/// </summary>
/// <param name="writer">Destination for the CSV output.</param>
/// <param name="configuration">The configuration to apply.</param>
public CsvWriter(TextWriter writer, CsvConfiguration configuration)
{
    // Guard rejects null arguments up front.
    Guard.ArgumentNotNull(() => writer);
    Guard.ArgumentNotNull(() => configuration);

    this.Configuration = configuration;
    _writer = writer;
}
/// <summary>
/// Loads a delimited file into a TableData, optionally selecting a subset of columns.
/// </summary>
/// <param name="fileName">Path of the file to read.</param>
/// <param name="delimeter">Field delimiter; defaults to a tab when null.</param>
/// <param name="hasHeaderRecord">Whether the first row holds column names.</param>
/// <param name="ignoreQuotes">Whether quote characters are treated as literal text.</param>
/// <param name="columnIndexes">Source column indexes to load; null means all columns in order.</param>
/// <param name="classIndex">Position (within columnIndexes) whose attribute is named "Class"; -1 for none.</param>
public static TableData LoadDataFromfCSV(string fileName, string delimeter = null, bool hasHeaderRecord = true, bool ignoreQuotes = true, int[] columnIndexes = null, int classIndex=-1)
{
    var configuration = new CsvConfiguration();
    configuration.Delimiter = delimeter ?? "\t";
    configuration.HasExcelSeparator = false;
    configuration.IgnoreQuotes = ignoreQuotes;
    configuration.HasHeaderRecord = hasHeaderRecord;
    configuration.QuoteNoFields = true;
    using (var reader = new CsvReader(new StreamReader(fileName), configuration))
    {
        var data = new TableData();
        var index = 0;
        while (reader.Read())
        {
            // First iteration only: determine the columns and register attributes.
            if (index == 0)
            {
                // Column count comes from the header when present, else from the first record.
                var noOfAttributes = hasHeaderRecord ? reader.FieldHeaders.Length : reader.CurrentRecord.Length;
                if (columnIndexes == null)
                {
                    // No selection given: take every column in order.
                    columnIndexes = new int[noOfAttributes];
                    for (var j = 0; j < columnIndexes.Length; j++)
                    {
                        columnIndexes[j] = j;
                    }
                }
                for (int column = 0; column < columnIndexes.Length; column++)
                {
                    // classIndex wins over the header name; without a header, synthesize "ColumnN".
                    var columnName = column == classIndex ? "Class" : hasHeaderRecord ? reader.FieldHeaders[columnIndexes[column]] : "Column" + column;
                    data.AddAttribute(columnName);
                }
                index++;
            }
            // Copy the selected fields of the current record into a new row.
            var row = data.NewRow();
            var attributes = data.Attributes.ToArray();
            for (var columnIndex = 0; columnIndex < columnIndexes.Length; columnIndex++)
            {
                var columnName = attributes[columnIndex];
                row[columnName] = reader.GetField(columnIndexes[columnIndex]);
            }
            data.AddRow(row);
        }
        return data;
    }
}
/// <summary>
/// Builds the reader configuration: tab-delimited, tolerant of missing fields,
/// and skipping empty records.
/// </summary>
private static CsvConfiguration CreateFileReaderConfiguration()
{
    return new CsvConfiguration
    {
        WillThrowOnMissingField = false,
        SkipEmptyRecords = true,
        Delimiter = "\t"
    };
}
/// <summary>
/// Applies the standard Topcat export settings (tab delimiter, prefixed reference
/// headers) and registers the keyword/extent converters; returns the same instance
/// so calls can be chained.
/// </summary>
public static CsvConfiguration ApplyStandardTopcatCsvConfiguration(CsvConfiguration config)
{
    config.Delimiter = "\t";
    config.PrefixReferenceHeaders = true;

    // NOTE: converter registration goes through the global factory, not the config instance.
    TypeConverterFactory.AddConverter<List<MetadataKeyword>>(new Exporter.MetadataKeywordConverter());
    TypeConverterFactory.AddConverter<List<Extent>>(new Exporter.ExtentListConverter());

    return config;
}
/// <summary>
/// Creates a semicolon-delimited, UTF-8 CSV writer over the given file.
/// </summary>
public static ICsvWriter Create(FileInfoBase fileInfo)
{
    var configuration = new CsvConfiguration
    {
        Delimiter = ";",
        HasHeaderRecord = true
    };
    var output = new StreamWriter(fileInfo.OpenWrite(), Encoding.UTF8);
    return new CsvHelper.CsvWriter(output, configuration);
}
public void FieldAttributeNameTest()
{
    // Attribute mapping should expose all four properties with their configured names.
    var config = new CsvConfiguration();
    config.AttributeMapping<TestClass>();

    string[] expectedNames = { "Guid Column", "Int Column", "String Column", "NotUsedColumn" };
    Assert.Equal( expectedNames.Length, config.Properties.Count );
    for( var i = 0; i < expectedNames.Length; i++ )
    {
        Assert.Equal( expectedNames[i], config.Properties[i].NameValue );
    }
}
public void AutoMapEnumerableTest()
{
    // AutoMap must reject enumerable types with a CsvConfigurationException.
    var config = new CsvConfiguration();
    var threw = false;
    try
    {
        config.AutoMap( typeof( List<string> ) );
    }
    catch( CsvConfigurationException )
    {
        threw = true;
    }
    if( !threw )
    {
        Assert.Fail();
    }
}
public void EnsureInternalsAreSetupWhenPassingReaderAndConfigTest()
{
    using( var memoryStream = new MemoryStream() )
    using( var streamReader = new StreamReader( memoryStream ) )
    {
        // The parser must keep the exact configuration instance it was handed.
        var config = new CsvConfiguration();
        using( var parser = new CsvParser( streamReader, config ) )
        {
            Assert.Same( config, parser.Configuration );
        }
    }
}
public void FieldAttributeIgnoreTest()
{
    // Only the second property is attributed as ignored.
    var config = new CsvConfiguration();
    config.AttributeMapping<TestClass>();

    bool[] expectedIgnores = { false, true, false, false };
    Assert.Equal( expectedIgnores.Length, config.Properties.Count );
    for( var i = 0; i < expectedIgnores.Length; i++ )
    {
        Assert.Equal( expectedIgnores[i], config.Properties[i].IgnoreValue );
    }
}
public void FieldAttributeTypeConverterTest()
{
    // Each property should carry the converter type declared by its attribute.
    var config = new CsvConfiguration();
    config.AttributeMapping<TestClass>();

    Type[] expectedConverters = { typeof( StringConverter ), typeof( Int32Converter ), typeof( Int16Converter ), typeof( StringConverter ) };
    Assert.AreEqual( expectedConverters.Length, config.Properties.Count );
    for( var i = 0; i < expectedConverters.Length; i++ )
    {
        Assert.IsInstanceOfType( config.Properties[i].TypeConverterValue, expectedConverters[i] );
    }
}
public void FieldAttributeMultipleNamesTest()
{
    // Both properties declare two alternative header names each.
    var config = new CsvConfiguration();
    config.AttributeMapping<TestMultipleNamesClass>();

    Assert.Equal( 2, config.Properties.Count );

    var idNames = config.Properties[0].NamesValue;
    Assert.Equal( 2, idNames.Length );
    Assert.Equal( "Id1", idNames[0] );
    Assert.Equal( "Id2", idNames[1] );

    var nameNames = config.Properties[1].NamesValue;
    Assert.Equal( "Name1", nameNames[0] );
    Assert.Equal( "Name2", nameNames[1] );
}
/// <summary>
/// Wires the import service to its data context and prepares a lenient,
/// semicolon-delimited English-culture CSV configuration.
/// </summary>
public DefaultImportService(ReferenceDataContext ctx)
{
    _ctx = ctx;
    _csvConfiguration = new CsvConfiguration()
    {
        Delimiter = ";",
        CultureInfo = new CultureInfo("en"),
        HasHeaderRecord = true,
        IgnoreReadingExceptions = true
    };
}
public void EnsureInternalsAreSetupWhenPasingWriterAndConfigTest()
{
    using( var memoryStream = new MemoryStream() )
    using( var streamWriter = new StreamWriter( memoryStream ) )
    {
        // The writer must keep the exact configuration instance it was handed.
        var config = new CsvConfiguration();
        using( var csvWriter = new CsvWriter( streamWriter, config ) )
        {
            Assert.AreSame( config, csvWriter.Configuration );
        }
    }
}
public void FieldAttributeTypeConverterTest()
{
    // Each property should carry the converter type declared by its attribute.
    var config = new CsvConfiguration();
    config.AttributeMapping<TestClass>();

    Type[] expectedConverters = { typeof( StringConverter ), typeof( Int32Converter ), typeof( Int16Converter ), typeof( StringConverter ) };
    Assert.Equal( expectedConverters.Length, config.Properties.Count );
    for( var i = 0; i < expectedConverters.Length; i++ )
    {
        Assert.IsType( expectedConverters[i], config.Properties[i].TypeConverterValue );
    }
}
public void FieldAttributeIndexTest()
{
    // Indexes 1..3 are attributed; the last property has no index (-1).
    var config = new CsvConfiguration();
    config.AttributeMapping<TestClass>();

    int[] expectedIndexes = { 1, 2, 3, -1 };
    Assert.Equal( expectedIndexes.Length, config.Properties.Count );
    for( var i = 0; i < expectedIndexes.Length; i++ )
    {
        Assert.Equal( expectedIndexes[i], config.Properties[i].IndexValue );
    }
}
/// <summary>
/// Loads line items from a comma-delimited CSV file using the CsvLoadMap class map.
/// FIX: the original materialized the records with ToList() and then copied the
/// resulting list again with a second ToList(); one materialization suffices.
/// </summary>
/// <param name="fileName">Path of the CSV file to read.</param>
/// <returns>All parsed line items.</returns>
public static List<LineItemLoad> LoadData(string fileName)
{
    var csvConfig = new CsvConfiguration
    {
        HasHeaderRecord = true,
        Delimiter = ",",
        WillThrowOnMissingField = false,
        IgnoreBlankLines = true
    };

    using (var r = new CsvReader(new StreamReader(fileName), csvConfig))
    {
        r.Configuration.RegisterClassMap<CsvLoadMap>();
        // Materialize inside the using block so the reader stays open while enumerating.
        return r.GetRecords<LineItemLoad>().ToList();
    }
}
/// <summary>
/// Builds a filtered single-domain dataset from the four-domain Amazon ratings data:
/// keeps users with 1-20 ratings in every rated domain and more than 3 rated domains,
/// then writes their music-domain ratings to music_selected4.csv.
/// </summary>
public void CreateDatasetsFromOriginalDataset()
{
    // step 1: dataset — comma-delimited files with a header row.
    var config = new CsvConfiguration() { Delimiter = ",", HasHeaderRecord = true };
    var container = new CrossDomainDataContainer();
    var bookDomain = new Domain("book");
    var musicDomain = new Domain("music");
    var dvdDomain = new Domain("dvd");
    var videoDomain = new Domain("video");
    var bookReader = new CsvReader(Paths.AmazonAllBookRatings, config, bookDomain);
    var musicReader = new CsvReader(Paths.AmazonAllMusicRatings, config, musicDomain);
    var dvdReader = new CsvReader(Paths.AmazonAllDvdRatings, config, dvdDomain);
    var videoReader = new CsvReader(Paths.AmazonAllVideoRatings, config, videoDomain);
    bookReader.LoadData(container);
    musicReader.LoadData(container);
    dvdReader.LoadData(container);
    videoReader.LoadData(container);
    // Keep users whose per-domain rating counts are all in [1, 20] and who rated
    // in more than 3 domains; emit their music ratings as CSV lines.
    var output = container.Users.Values.Where(u =>
    {
        var counts = u.Ratings.GroupBy(r => r.Domain).Select(g => g.Count());
        return counts.All(c => c >= 1 && c <= 20) && (counts.Count() > 3);
    })
    //.Select(u => new { UserId = u.Id, Counts = u.Ratings.GroupBy(r => r.Domain.Id).Select(g => g.Count().ToString()).Aggregate((a,b) => a + " " + b) })
    //.Select(a => a.UserId + "," + a.Counts);
    .SelectMany(u => u.Ratings.Where(r => r.Domain == musicDomain))
    //.SelectMany(u => u.Ratings.GroupBy(r => r.Item.Id).Select(g => g.Take(1).Single()))
    .Select(r => r.ToString());
    Console.WriteLine("Writing...");
    var header = new string[] { "UserId,ItemId,Rating" };
    // Output naming history:
    // selected1: only music between 5 to 20
    // selected2: only music between 1 to 20
    // selected3: only music between 2 to 20
    // selected4: all domains with ratings between 1 to 20
    File.WriteAllLines("music_selected4.csv", header.Concat(output));
    //container.PrintStatistics();
}
/// <summary>
/// Creates a new serializer using the given <see cref="TextWriter"/>
/// and <see cref="CsvConfiguration"/>.
/// </summary>
/// <param name="writer">The <see cref="TextWriter"/> to write the CSV file data to.</param>
/// <param name="configuration">The configuration.</param>
/// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
public CsvSerializer(TextWriter writer, CsvConfiguration configuration)
{
    // Guard clauses: fail fast on null dependencies.
    if (writer == null)
    {
        throw new ArgumentNullException("writer");
    }
    if (configuration == null)
    {
        throw new ArgumentNullException("configuration");
    }

    this.configuration = configuration;
    this.writer = writer;
}
/// <summary>
/// Creates the writer and immediately emits the header row for <typeparamref name="T"/>,
/// so record rows start on the second line.
/// </summary>
public CsvWriterImpl(TextWriter tr, CsvConfiguration configuration)
{
    csv = new CsvWriter(tr, configuration.CsvHelperConfiguration);
    csv.WriteHeader <T>();
    csv.NextRecord();
}
/// <summary>
/// Creates a new parser using a new <see cref="XLWorkbook"/> from the given <paramref name="path"/> and uses the given <paramref name="configuration"/>.
/// </summary>
/// <param name="path">The path.</param>
/// <param name="configuration">The configuration.</param>
/// <param name="rowOffset">Row offset passed to the workbook overload (presumably leading rows to skip — confirm there).</param>
public ExcelParser(string path, CsvConfiguration configuration, int rowOffset = 0)
    : this(new XLWorkbook(path, XLEventTracking.Disabled), configuration, rowOffset)
{
    // This overload created the workbook itself, so it is flagged for disposal.
    disposeWorkbook = true;
}
/// <summary>
/// Serializes event-log entries to the requested format: "txt" (one line per entry),
/// "json" (Json.NET), or "csv" (name + source count with a prepended header).
/// A null/empty format defaults to "json"; unknown formats and null/empty logs
/// yield an empty string.
/// FIX: the CsvWriter (IDisposable) was never disposed; it is now in a using block,
/// whose disposal also flushes, replacing the explicit sw.Flush().
/// </summary>
public static string ToFormat(IList <ClassicEventLogData> logs, string format)
{
    string data = string.Empty;
    if (string.IsNullOrEmpty(format))
    {
        format = "json";
    }
    if (logs != null && logs.Count > 0)
    {
        switch (format.ToLower(CultureInfo.CurrentCulture))
        {
            case "txt":
                StringBuilder txtBuilder = new StringBuilder(string.Empty);
                foreach (ClassicEventLogData logData in logs)
                {
                    txtBuilder.AppendFormat("{0}{1}", logData, Environment.NewLine);
                }
                data = txtBuilder.ToString();
                break;
            case "json":
                JsonSerializerSettings settings = new JsonSerializerSettings { MaxDepth = int.MaxValue, Formatting = Formatting.None };
                data = JsonConvert.SerializeObject(logs, settings);
                break;
            case "csv":
                CsvConfiguration config = new CsvConfiguration { AllowComments = false, DetectColumnCountChanges = true, IgnoreQuotes = true, QuoteAllFields = true, TrimFields = true };
                StringBuilder csvBuilder = new StringBuilder(logs.Count);
                using (StringWriter sw = new StringWriter(csvBuilder, CultureInfo.InvariantCulture))
                using (CsvWriter csvWriter = new CsvWriter(sw, config))
                {
                    foreach (ClassicEventLogData log in logs)
                    {
                        csvWriter.WriteField <string>(log.Name);
                        csvWriter.WriteField <int>(log.Sources.Count); // no header due to type mismatch for property
                        csvWriter.NextRecord();
                    }
                }
                // Header is prepended manually (see type-mismatch note above).
                csvBuilder.Insert(0, "\"Name\",\"Sources\"" + Environment.NewLine);
                data = csvBuilder.ToString();
                break;
            default:
                break;
        }
    }
    return (data);
}
/// <summary>
/// Downloads a zipped short-zip-code CSV, extracts it, and bulk-inserts every row
/// into the ZipCodes table inside a single transaction via raw parameterized SQL
/// (bypassing EF change tracking for speed).
/// </summary>
/// <param name="url">URL of the zip archive to download.</param>
/// <param name="db">Database context providing the underlying connection.</param>
/// <returns>True on success; false (with fatal logging) on any failure.</returns>
public static async Task <bool> TryIngestShortZipCodesAsync(string url, WashingtonStateContext db)
{
    try
    {
        var pathtoFile = await url.DownloadFileAsync(AppContext.BaseDirectory);
        var fileName = string.Empty;
        // https://stackoverflow.com/questions/47973286/get-the-filename-of-a-file-that-was-created-through-zipfile-extracttodirectory
        //open archive
        using (var archive = ZipFile.OpenRead(pathtoFile))
        {
            //since there is only one entry grab the first
            var entry = archive.Entries.FirstOrDefault();
            //the relative path of the entry in the zip archive
            fileName = entry.FullName;
        }
        var pathToCSV = Path.Combine(AppContext.BaseDirectory, fileName);
        if (!File.Exists(pathToCSV))
        {
            ZipFile.ExtractToDirectory(pathtoFile, AppContext.BaseDirectory);
        }
        // Change tracking is not needed for raw-SQL inserts; disabling it avoids overhead.
        db.ChangeTracker.AutoDetectChangesEnabled = false;
        var config = new CsvConfiguration(CultureInfo.InvariantCulture)
        {
            HasHeaderRecord = false,
            Delimiter = ","
        };
        using (var reader = new StreamReader(pathToCSV))
        using (var csv = new CsvReader(reader, config))
        {
            var rowId = 0;
            csv.Context.RegisterClassMap <ShortZipMap>();
            // One transaction for the whole file keeps the bulk insert atomic and fast.
            using var transaction = await db.Database.BeginTransactionAsync();
            await foreach (var row in csv.GetRecordsAsync <ShortZip>())
            {
                var command = db.Database.GetDbConnection().CreateCommand();
                command.CommandText = $"insert into ZipCodes (ShortZipId, Zip, Plus4LowerBound, Plus4UpperBound, LocationCode, State, Local, TotalRate, EffectiveStartDate, EffectiveEndDate) values ($ShortZipId, $Zip, $Plus4LowerBound, $Plus4UpperBound, $LocationCode, $State, $Local, $TotalRate, $EffectiveStartDate, $EffectiveEndDate);";
                // ShortZipId is a locally generated sequential row id.
                var parameterShortZipId = command.CreateParameter();
                parameterShortZipId.ParameterName = "$ShortZipId";
                command.Parameters.Add(parameterShortZipId);
                parameterShortZipId.Value = rowId++;
                // String columns map empty/whitespace CSV values to DBNull.
                var parameterZip = command.CreateParameter();
                parameterZip.ParameterName = "$Zip";
                command.Parameters.Add(parameterZip);
                parameterZip.Value = string.IsNullOrWhiteSpace(row?.Zip) ? DBNull.Value : row.Zip;
                var parameterPlus4LowerBound = command.CreateParameter();
                parameterPlus4LowerBound.ParameterName = "$Plus4LowerBound";
                command.Parameters.Add(parameterPlus4LowerBound);
                parameterPlus4LowerBound.Value = string.IsNullOrWhiteSpace(row?.Plus4LowerBound) ? DBNull.Value : row.Plus4LowerBound;
                var parameterPlus4UpperBound = command.CreateParameter();
                parameterPlus4UpperBound.ParameterName = "$Plus4UpperBound";
                command.Parameters.Add(parameterPlus4UpperBound);
                parameterPlus4UpperBound.Value = string.IsNullOrWhiteSpace(row?.Plus4UpperBound) ? DBNull.Value : row.Plus4UpperBound;
                // NOTE(review): LocationCode is written without a null guard, unlike the
                // string columns above — confirm it is non-nullable in ShortZip.
                var parameterLocationCode = command.CreateParameter();
                parameterLocationCode.ParameterName = "$LocationCode";
                command.Parameters.Add(parameterLocationCode);
                parameterLocationCode.Value = row.LocationCode;
                var parameterState = command.CreateParameter();
                parameterState.ParameterName = "$State";
                command.Parameters.Add(parameterState);
                parameterState.Value = string.IsNullOrWhiteSpace(row?.State) ? DBNull.Value : row.State;
                var parameterLocal = command.CreateParameter();
                parameterLocal.ParameterName = "$Local";
                command.Parameters.Add(parameterLocal);
                parameterLocal.Value = string.IsNullOrWhiteSpace(row?.Local) ? DBNull.Value : row.Local;
                var parameterTotalRate = command.CreateParameter();
                parameterTotalRate.ParameterName = "$TotalRate";
                command.Parameters.Add(parameterTotalRate);
                parameterTotalRate.Value = string.IsNullOrWhiteSpace(row?.TotalRate) ? DBNull.Value : row.TotalRate;
                var parameterEffectiveStartDate = command.CreateParameter();
                parameterEffectiveStartDate.ParameterName = "$EffectiveStartDate";
                command.Parameters.Add(parameterEffectiveStartDate);
                parameterEffectiveStartDate.Value = row.EffectiveStartDate;
                var parameterEffectiveEndDate = command.CreateParameter();
                parameterEffectiveEndDate.ParameterName = "$EffectiveEndDate";
                command.Parameters.Add(parameterEffectiveEndDate);
                parameterEffectiveEndDate.Value = row.EffectiveEndDate;
                await command.ExecuteNonQueryAsync();
            }
            await transaction.CommitAsync();
        }
        return (true);
    }
    catch (Exception ex)
    {
        Log.Fatal("[Ingest] Failed to load Short Zip Codes into the database.");
        Log.Fatal(ex.Message);
        Log.Fatal(ex.StackTrace);
        return (false);
    }
}
/// <summary>
/// Creates a new parser using the given <see cref="TextReader"/> and <see cref="CsvConfiguration"/>.
/// </summary>
/// <param name="reader">The <see cref="TextReader"/> with the CSV file data.</param>
/// <param name="configuration">The configuration.</param>
// Delegates to the three-argument constructor; the meaning of the trailing
// 'false' flag is defined there (not visible here — confirm against that overload).
public CsvParser(TextReader reader, CsvConfiguration configuration) : this(reader, configuration, false) { }
/// <summary>
/// Reads member rows from the import job's CSV blob, mapping columns per
/// <paramref name="importModel"/>. Rows without an e-mail are skipped; empty
/// display/sort names fall back to combinations of given and family name.
/// FIX: the identical "if mapping configured, read field" pattern was repeated
/// six times; it is factored into a local helper with unchanged semantics.
/// </summary>
private async Task <List <MemberImportModel> > LoadData(ImportJob importJob, MemberImportJobModel importModel, CancellationToken cancellationToken)
{
    var dataList = new List <MemberImportModel>();
    var configuration = new CsvConfiguration(CultureInfo.CurrentCulture)
    {
        HasHeaderRecord = true
    };

    using var blobStream = await _storageService.OpenReadAsync(importJob.StorageFile, cancellationToken);
    using var reader = new StreamReader(blobStream);
    using var csv = new CsvReader(reader, configuration);

    csv.Read();
    csv.ReadHeader();

    while (csv.Read())
    {
        var item = new MemberImportModel();
        item.Email = csv.GetField(importModel.EmailMapping);
        if (StringExtensions.IsNullOrEmpty(item.Email))
        {
            // An e-mail address is mandatory; skip the row otherwise.
            continue;
        }

        // Assigns the mapped field only when a column mapping is configured,
        // leaving the model's default untouched otherwise (same as the original).
        void SetIfMapped(string mapping, Action<string> assign)
        {
            if (StringExtensions.HasValue(mapping))
            {
                assign(csv.GetField(mapping));
            }
        }

        SetIfMapped(importModel.DisplayNameMapping, v => item.DisplayName = v);
        SetIfMapped(importModel.SortNameMapping, v => item.SortName = v);
        SetIfMapped(importModel.GivenNameMapping, v => item.GivenName = v);
        SetIfMapped(importModel.FamilyNameMapping, v => item.FamilyName = v);
        SetIfMapped(importModel.JobTitleMapping, v => item.JobTitle = v);
        SetIfMapped(importModel.PhoneNumberMapping, v => item.PhoneNumber = v);

        if (StringExtensions.IsNullOrEmpty(item.DisplayName))
        {
            item.DisplayName = $"{item.GivenName} {item.FamilyName}".Trim();
        }
        if (StringExtensions.IsNullOrEmpty(item.SortName))
        {
            item.SortName = $"{item.FamilyName}, {item.GivenName}".Trim();
        }

        dataList.Add(item);
    }

    return (dataList);
}
/// <summary>
/// Exports schema contents either as JSON (one file, or one file per content item)
/// or as CSV (single semicolon-delimited file restricted to the requested fields).
/// Output paths default to "{schema}_{utc timestamp}" when not specified.
/// </summary>
/// <exception cref="SquidexException">
/// Thrown when fields are supplied for JSON export, when per-content files are
/// requested for CSV export, or when fields are missing for CSV export.
/// </exception>
public async Task Export(ExportArguments arguments)
{
    // NOTE(review): ctx and client are created but not used in this method body —
    // possibly consumed inside ExportAsync via state not visible here; confirm.
    var ctx = QueryContext.Default.Unpublished(arguments.Unpublished);
    var (_, service) = Configuration.Setup();
    var client = service.CreateContentsClient <DummyEntity, DummyData>(arguments.Schema);
    if (arguments.Format == Format.JSON)
    {
        if (!string.IsNullOrWhiteSpace(arguments.Fields))
        {
            throw new SquidexException("Fields are not used for JSON export.");
        }
        var fileOrFolder = arguments.Output;
        if (arguments.FilePerContent)
        {
            // One file per content item: ensure the target folder exists.
            if (string.IsNullOrWhiteSpace(fileOrFolder))
            {
                fileOrFolder = $"{arguments.Schema}_{DateTime.UtcNow:yyyy-MM-dd-hh-mm-ss}";
            }
            if (!Directory.Exists(fileOrFolder))
            {
                Directory.CreateDirectory(fileOrFolder);
            }
        }
        else
        {
            // Single file: start fresh since content is appended below.
            if (string.IsNullOrWhiteSpace(fileOrFolder))
            {
                fileOrFolder = $"{arguments.Schema}_{DateTime.UtcNow:yyyy-MM-dd-hh-mm-ss}.json";
            }
            if (File.Exists(fileOrFolder))
            {
                File.Delete(fileOrFolder);
            }
        }
        await ExportAsync(arguments, entity =>
        {
            if (arguments.FilePerContent)
            {
                File.WriteAllText(Path.Combine(fileOrFolder, $"{arguments.Schema}_{entity.Id}.json"), entity.JsonPrettyString());
            }
            else
            {
                File.AppendAllText(fileOrFolder, entity.JsonPrettyString());
            }
        });
    }
    else
    {
        if (arguments.FilePerContent)
        {
            throw new SquidexException("Multiple files are not supported for CSV export.");
        }
        if (string.IsNullOrWhiteSpace(arguments.Fields))
        {
            throw new SquidexException("Fields must be defined for CSV export.");
        }
        var file = arguments.Output;
        if (string.IsNullOrWhiteSpace(file))
        {
            file = $"{arguments.Schema}_{DateTime.UtcNow:yyyy-MM-dd-hh-mm-ss}.csv";
        }
        var converter = new Squidex2CsvConverter(arguments.Fields);
        using (var stream = new FileStream(file, FileMode.Create, FileAccess.Write))
        {
            using (var streamWriter = new StreamWriter(stream))
            {
                var csvOptions = new CsvConfiguration(CultureInfo.InvariantCulture)
                {
                    Delimiter = ";"
                };
                using (var writer = new CsvWriter(streamWriter, csvOptions))
                {
                    // Header row from the requested field names.
                    foreach (var fieldName in converter.FieldNames)
                    {
                        writer.WriteField(fieldName);
                    }
                    writer.NextRecord();
                    await ExportAsync(arguments, entity =>
                    {
                        foreach (var value in converter.GetValues(entity))
                        {
                            // Strings are written force-quoted; other values use default formatting.
                            if (value is string text)
                            {
                                writer.WriteField(text, true);
                            }
                            else
                            {
                                writer.WriteField(value);
                            }
                        }
                        writer.NextRecord();
                    });
                }
            }
        }
    }
}
/// <summary>
/// Exports the orders selected by <paramref name="model"/> to a timestamped CSV file
/// in the export folder and returns the file path. NOTE: despite the "Async" suffix
/// this method is synchronous; the name is kept for caller compatibility.
/// FIX: the data reader and CSV writer are now disposed, the unused sqlQuery local
/// and the no-op catch (Exception) { throw; } wrapper are removed, and tracking-number
/// formatting is factored into a helper.
/// </summary>
public string CustomExportOrderAsync(ExportOrder model)
{
    var filePath = string.Format("{1}\\{0:MM}{0:dd}{0:yyyy}-{0:HH}{0:mm}{0:ss}_CustomExportOrders.csv", model.RequestedDate, _exportFolder);
    using (var streamWriter = new StreamWriter(filePath))
    using (var conn = new MySqlConnection(_connectionString))
    using (var reader = MySqlHelper.ExecuteReader(conn, CommandType.Text, createCustomExportSqlQuery(model), null))
    {
        var config = new CsvConfiguration();
        config.Delimiter = model.Delimiter;
        using (var csvWriter = new CsvWriter(streamWriter, config))
        {
            // write the header text for the CSV files
            foreach (var field in model.OrderFieldsArr)
            {
                csvWriter.WriteField(removePrefixTable(field));
            }
            csvWriter.NextRecord();

            while (reader.Read())
            {
                foreach (var field in model.OrderFieldsArr)
                {
                    var fieldName = removePrefixTable(field);
                    if (fieldName == "TrackingNumber")
                    {
                        csvWriter.WriteField(formatTrackingNumber(reader[fieldName].ToString()));
                    }
                    else
                    {
                        csvWriter.WriteField(reader[fieldName]);
                    }
                }
                csvWriter.NextRecord();
            }
        }
    }
    return (filePath);
}

/// <summary>
/// Hyphenates a tracking number: 22 characters are formatted as USPS (2-4-4-4-4-4),
/// anything else as FedEx (2-4-4-4-4-2). Null/empty input is returned unchanged.
/// </summary>
private static string formatTrackingNumber(string trackingNumber)
{
    if (String.IsNullOrEmpty(trackingNumber))
    {
        return trackingNumber;
    }
    if (trackingNumber.Length == 22)
    {
        //Format USPS Tracking No.
        return Regex.Replace(trackingNumber, @"^(..)(....)(....)(....)(....)(....)$", "$1-$2-$3-$4-$5-$6");
    }
    //Format FedEx Tracking Number
    return Regex.Replace(trackingNumber, @"^(..)(....)(....)(....)(....)(..)$", "$1-$2-$3-$4-$5-$6");
}
/// <summary>
/// Imports QnA pairs from a tab-separated file into the QnA Maker knowledge base and
/// records one activity row per question in Azure Table storage.
/// FIX: the handler is now async void (acceptable only for a top-level event handler)
/// so the knowledge-base update and table insert are actually awaited — previously
/// both tasks were fired and forgotten, losing failures and allowing races. The awaits
/// deliberately do NOT use ConfigureAwait(false) because the loop updates UI controls.
/// FIX: Random is created once outside the loop (per-iteration instances can repeat
/// seeds), the unused MemoryStream is removed, and the loop-invariant storage-table
/// lookup is hoisted out of the loop.
/// </summary>
private async void btnImport_Click(object sender, EventArgs e)
{
    var csvConfig = new CsvConfiguration(CultureInfo.InvariantCulture)
    {
        Delimiter = "\t",
        Encoding = UTF8Encoding.UTF8,
        HasHeaderRecord = true,
        MissingFieldFound = null,
        BadDataFound = null,
    };
    var qnaMakerClient = new QnAMakerClient(new ApiKeyServiceClientCredentials(ConfigurationManager.AppSettings["QnAMakerSubscriptionKey"])) { Endpoint = ConfigurationManager.AppSettings["QnAMakerApiEndpointUrl"] };
    var knowledgeBase = ConfigurationManager.AppSettings["QnAMakerKnowledgeBaseId"];
    var activityTableName = "ActivityEntity";
    var random = new Random();

    // Resolve the storage table once; it does not change between rows.
    var storageAccount = CloudStorageAccount.Parse(ConfigurationManager.AppSettings["StorageConnectionString"]);
    var cloudTableClient = storageAccount.CreateCloudTableClient();
    var cloudTable = cloudTableClient.GetTableReference(activityTableName);

    using (var reader = new StreamReader(tbFileName.Text))
    using (var csv = new CsvReader(reader, csvConfig))
    {
        csv.Read();
        csv.ReadHeader();
        while (csv.Read())
        {
            var record = csv.GetRecord <ImportKbPoco>();
            var activityReferenceId = Guid.NewGuid().ToString();
            var activityId = random.Next().ToString();

            // Update knowledgebase with this question/answer pair.
            await qnaMakerClient.Knowledgebase.UpdateAsync(knowledgeBase, new UpdateKbOperationDTO
            {
                // Create JSON of changes.
                Add = new UpdateKbOperationDTOAdd
                {
                    QnaList = new List <QnADTO>
                    {
                        new QnADTO
                        {
                            Questions = new List <string> { record.Question?.Trim() },
                            Answer = record.Answer?.Trim(),
                            Metadata = new List <MetadataDTO>()
                            {
                                new MetadataDTO() { Name = Constants.MetadataCreatedAt, Value = DateTime.UtcNow.Ticks.ToString(CultureInfo.InvariantCulture) },
                                new MetadataDTO() { Name = Constants.MetadataCreatedBy, Value = "21514982-6fdb-496e-81c0-5755209438bc" },
                                new MetadataDTO() { Name = Constants.MetadataConversationId, Value = activityReferenceId },
                                new MetadataDTO() { Name = Constants.MetadataActivityReferenceId, Value = activityReferenceId },
                            },
                        },
                    },
                },
                Update = null,
                Delete = null,
            });

            tbQuestions.Text = tbQuestions.Text + $"Question added - : {record.Question}";

            var activityEntity = new ActivityEntity { ActivityId = activityId, ActivityReferenceId = activityReferenceId };
            var insertOrMergeOperation = TableOperation.InsertOrReplace(activityEntity);
            var result = await cloudTable.ExecuteAsync(insertOrMergeOperation);
        }
    }
}
/// <summary>
/// Builds the map by auto-mapping properties from the supplied configuration.
/// </summary>
public ClubMap(CsvConfiguration configuration)
{
    AutoMap(configuration);
}
/// <summary>
/// CSV file open handler: lets the user pick one or more CSV files (Shift_JIS
/// encoded), parses date/item-name/outgo columns at the indices configured on
/// the selected book, adds the rows to the comparison list, then refreshes the
/// comparison info and the sum.
/// </summary>
/// <param name="sender">Event sender.</param>
/// <param name="e">Routed command event data.</param>
private async void OpenCsvFilesCommand_Executed(object sender, ExecutedRoutedEventArgs e)
{
    Properties.Settings settings = Properties.Settings.Default;
    // Seed the dialog with the previously used file, if any.
    string directory = string.Empty;
    string fileName = string.Empty;
    if (settings.App_CsvFilePath != string.Empty)
    {
        directory = Path.GetDirectoryName(settings.App_CsvFilePath);
        fileName = Path.GetFileName(settings.App_CsvFilePath);
    }
    OpenFileDialog ofd = new OpenFileDialog()
    {
        CheckFileExists = true,
        InitialDirectory = directory,
        FileName = fileName,
        Title = "ファイル選択",
        Filter = "CSVファイル|*.csv",
        Multiselect = true
    };
    if (ofd.ShowDialog() == false) { return; }

    this.Cursor = Cursors.Wait;
    this.WVM.CsvFileName = Path.GetFileName(ofd.FileName);

    // Persist the path of the opened CSV file as a setting.
    settings.App_CsvFilePath = ofd.FileName;
    settings.Save();

    // Column indices configured on the selected book.
    // NOTE(review): .Value is used below without a HasValue check — if any index
    // is unset this throws inside the try and is silently swallowed; confirm the
    // indices are guaranteed non-null by the time this command is enabled.
    int? actDateIndex = this.WVM.SelectedBookVM.ActDateIndex;
    int? itemNameIndex = this.WVM.SelectedBookVM.ItemNameIndex;
    int? outgoIndex = this.WVM.SelectedBookVM.OutgoIndex;

    // Read the CSV files.
    CsvConfiguration csvConfig = new CsvConfiguration(System.Globalization.CultureInfo.CurrentCulture)
    {
        HasHeaderRecord = true,
        MissingFieldFound = (mffa) => { } // ignore missing fields
    };
    List<CsvComparisonViewModel> tmpList = new List<CsvComparisonViewModel>();
    foreach (string tmpFileName in ofd.FileNames)
    {
        using (CsvReader reader = new CsvReader(new StreamReader(tmpFileName, Encoding.GetEncoding("Shift_JIS")), csvConfig))
        {
            while (reader.Read())
            {
                try
                {
                    // Only keep rows where all three fields parse successfully.
                    if (reader.TryGetField(actDateIndex.Value, out DateTime date) &&
                        reader.TryGetField(itemNameIndex.Value, out string name) &&
                        reader.TryGetField(outgoIndex.Value, out string valueStr) &&
                        int.TryParse(valueStr, NumberStyles.Any, NumberFormatInfo.CurrentInfo, out int value))
                    {
                        tmpList.Add(new CsvComparisonViewModel() { Record = new CsvComparisonViewModel.CsvRecord() { Date = date, Name = name, Value = value } });
                    }
                }
                catch (Exception) { } // best-effort: skip unreadable rows
            }
        }
    }
    foreach (CsvComparisonViewModel vm in tmpList) { this.WVM.CsvComparisonVMList.Add(vm); }
    this.csvCompDataGrid.ScrollToTop();

    await this.UpdateComparisonInfoAsync();

    // Compute the total of all imported values.
    this.WVM.SumValue = this.WVM.CsvComparisonVMList.Sum(vm => vm.Record.Value);

    this.Cursor = null;
}
/// <summary>
/// Handles CSV file selection: resets the UI and training state, parses the
/// chosen file into the training dataset with default "FieldN" headers, and
/// re-enables the relevant controls. On any parse error the state is cleared
/// and the user is shown an error dialog.
/// </summary>
private void fileSelectBtn_Click(object sender, EventArgs e)
{
    if (csvFileLoader.ShowDialog() == DialogResult.OK)
    {
        // Force disable the CheckBox
        useHeaderCheckbox.Enabled = false;
        useHeaderCheckbox.Checked = false;
        // Force disable Buttons
        startTrainingBtn.Enabled = false;
        showTreeBtn.Enabled = false;
        predictionsBtn.Enabled = false;
        // Clear any previous training data
        tree = null;
        trainingData = new Dataset();
        // No header row: headers are synthesized below after parsing.
        CsvConfiguration config = new CsvConfiguration(CultureInfo.InvariantCulture) { HasHeaderRecord = false };
        // Wrap all this code in a try-catch block to catch parsing errors
        try
        {
            // Read the contents of the file into a stream
            using (Stream fileStream = csvFileLoader.OpenFile())
            using (StreamReader reader = new StreamReader(fileStream))
            using (CsvReader csv = new CsvReader(reader, config))
            {
                int maxFeatureCount = 0;
                // Read every line individually
                while (csv.Read())
                {
                    // Parse the current record
                    var record = csv.GetRecord<dynamic>();
                    Row featureRow = Row.ParseFromCsv(record);
                    if (featureRow != null)
                    {
                        trainingData.Add(featureRow);
                        // Track the widest row so every column gets a header.
                        if (featureRow.Count > maxFeatureCount)
                        {
                            maxFeatureCount = featureRow.Count;
                        }
                    }
                }
                // Add default headers
                for (int i = 0; i < maxFeatureCount; ++i)
                {
                    string header = $"Field{i + 1}";
                    trainingData.Headers.Add(header);
                }
            }
            // Get the path of the specified file
            csvFilePath = csvFileLoader.SafeFileName;
            fileNameLabel.Text = $"Seleccionado: {csvFilePath}";
            // Display the fields in the UI
            CreateFieldsFromDataset(fieldsPanel, trainingData);
            // Reset the CheckBox state to enabled
            useHeaderCheckbox.Enabled = true;
            // Re-enable the training button
            startTrainingBtn.Enabled = true;
        }
        catch (Exception)
        {
            // This will catch any type of Exception (for now)
            // Clear any fields that were added
            fieldsPanel.Controls.Clear();
            // Clear any changes made to trainingData
            trainingData.Clear();
            // Inform the user parsing has failed
            string title = "¡Uh-oh!";
            string message = "Ha sucedido un problema al intentar leer el archivo CSV. Revisa que este tenga una sintaxis válida.";
            _ = MessageBox.Show(message, title, MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
    }
}
/// <summary>
/// Writes <paramref name="dataTable"/> to a temporary CSV file (skipping the
/// internal row-number column) and bulk-loads it into MySQL via
/// <paramref name="loader"/>. The temp file is always deleted afterwards.
/// </summary>
/// <param name="loader">The configured bulk loader; may be null, in which case 0 is returned.</param>
/// <param name="dataTable">The rows to load.</param>
/// <returns>The number of rows loaded.</returns>
private async Task<int> BulkLoadAsync(MySqlBulkLoader loader, DataTable dataTable)
{
    if (loader == null)
    {
        return 0;
    }
    // Use the loader's file name if set, otherwise a fresh temp file.
    string path = loader.FileName;
    if (string.IsNullOrEmpty(path))
    {
        path = Path.GetTempFileName();
        loader.FileName = path;
    }
    // Fix: the original `catch (Exception e) { throw e; }` rethrew with a reset
    // stack trace and added nothing; try/finally alone preserves the trace
    // while still guaranteeing cleanup.
    try
    {
        using (var writer = new StreamWriter(path))
        {
            var configuration = new CsvConfiguration(CultureInfo.CurrentCulture)
            {
                HasHeaderRecord = false
            };
            // Write NULL DateTimes in a format MySQL's bulk loader accepts.
            configuration.TypeConverterCache.AddConverter<DateTime?>(new NullDateTimeConverter());
            using (var csv = new CsvWriter(writer, configuration))
            using (var dt = dataTable.Copy())
            {
                // Register every real column with the loader; the synthetic
                // row-number column is excluded from both columns and data.
                foreach (DataColumn column in dt.Columns)
                {
                    var columnName = column.ColumnName;
                    if (columnName != RowNumberColumnName)
                    {
                        loader.Columns.Add(columnName);
                    }
                }
                foreach (DataRow row in dt.Rows)
                {
                    for (var i = 0; i < dt.Columns.Count; i++)
                    {
                        var columnName = dt.Columns[i].ColumnName;
                        if (columnName != RowNumberColumnName)
                        {
                            csv.WriteField(row[i]);
                        }
                    }
                    csv.NextRecord();
                }
            }
        }
        return await loader.LoadAsync();
    }
    finally
    {
        File.Delete(path);
    }
}
/// <summary>
/// Exports the data-migration content list to a CSV file on a background
/// thread, reporting progress back to the UI through the sync context.
/// </summary>
private void OnCommandExportContent(object sender, EventArgs e)
{
    // Export to CSV
    IDataMigrationView view = base._view as IDataMigrationView;
    IDataMigrationModel model = base._model as IDataMigrationModel;
    PresenterBase.SetModelPropertiesFromView<IDataMigrationModel, IDataMigrationView>(ref model, view);
    IMainPresenter presenter = SimpleResolver.Instance.Get<IMainPresenter>();
    try
    {
        if (SyncContext != null)
        {
            SyncContext.Post(delegate
            {
                presenter.EnableControl(false);
                presenter.ActivateSpinner(true);
                presenter.UpdateProgressInfo("Preparing Report...");
            }, null);
        }
        FileInfo fileInfo = new FileInfo(model.OutputFileName);
        if (Directory.Exists(fileInfo.DirectoryName))
        {
            Thread writeReport = new Thread(() =>
            {
                CsvConfiguration config = new CsvConfiguration()
                {
                    HasHeaderRecord = true,
                    Delimiter = ",",
                    Encoding = System.Text.Encoding.UTF8
                };
                config.RegisterClassMap(new DataMigrationHeaderMap());
                int total = model.Contents.Count;
                using (CsvWriter writer = new CsvWriter(new StreamWriter(model.OutputFileName), config))
                {
                    writer.WriteHeader<DataMigrationHeaderRecord>();
                    for (int i = 0; i < model.Contents.Count; i++)
                    {
                        ContentDisplayListViewItemModel lvItem = model.Contents[i];
                        // Optional string fields are normalized to "".
                        writer.WriteField<string>(!string.IsNullOrEmpty(lvItem.OwnerName) ? lvItem.OwnerName : "");
                        writer.WriteField<string>(lvItem.Email);
                        writer.WriteField<string>(lvItem.ItemPathDisplay);
                        writer.WriteField<string>(!string.IsNullOrEmpty(lvItem.ItemPathId) ? lvItem.ItemPathId : "");
                        writer.WriteField<string>(lvItem.ItemName);
                        writer.WriteField<string>(!string.IsNullOrEmpty(lvItem.ItemId) ? lvItem.ItemId : "");
                        writer.WriteField<string>(lvItem.ItemType);
                        writer.WriteField<string>(!string.IsNullOrEmpty(lvItem.ItemSize) ? lvItem.ItemSize : "");
                        writer.WriteField<string>(!string.IsNullOrEmpty(lvItem.Created) ? lvItem.Created : "");
                        writer.WriteField<string>(!string.IsNullOrEmpty(lvItem.LastModified) ? lvItem.LastModified : "");
                        writer.WriteField<string>(!string.IsNullOrEmpty(lvItem.Uploaded) ? lvItem.Uploaded : "");
                        writer.NextRecord();
                        if (SyncContext != null)
                        {
                            // Fix: snapshot the loop variable before posting.
                            // The posted delegate runs asynchronously, and the
                            // for-loop variable `i` is shared across iterations,
                            // so capturing `i` directly could report a later
                            // (wrong) record number.
                            int recordNumber = i + 1;
                            SyncContext.Post(delegate
                            {
                                presenter.UpdateProgressInfo(string.Format("Writing Record: {0}/{1}", recordNumber, total));
                            }, null);
                        }
                    }
                }
                if (SyncContext != null)
                {
                    SyncContext.Post(delegate
                    {
                        presenter.EnableControl(true);
                        presenter.ActivateSpinner(false);
                        presenter.UpdateProgressInfo("Completed");
                    }, null);
                }
            });
            writeReport.Start();
        }
        else
        {
            throw new InvalidDataException(ErrorMessages.INVALID_EXPORT_FOLDER);
        }
    }
    catch (Exception ex)
    {
        // Restore the UI and surface the error message through the progress label.
        if (SyncContext != null)
        {
            SyncContext.Post(delegate
            {
                presenter.EnableControl(true);
                presenter.ActivateSpinner(false);
                presenter.UpdateProgressInfo("Completed with exception: " + ex.Message);
            }, null);
        }
    }
}
/// <summary>
/// Convenience method for Csv import operations.
/// </summary>
/// <param name="importRepository">Database repository for entity</param>
/// <param name="map">CSV Class Map for entity</param>
/// <param name="importSettings">CSV Import Settings</param>
/// <param name="resultType">Import Result Type</param>
/// <param name="importFilename">Target Csv File</param>
/// <typeparam name="TE">Entity Type</typeparam>
/// <returns>ImportResult describing how many records were imported and which raw records were bad.</returns>
protected async Task<ImportResult> ImportCsv<TE>(IBulkImportRepository<TE> importRepository, ClassMap<TE> map, CsvImportSettings importSettings, ImportResultType resultType, string importFilename)
{
    ImportResult result = new ImportResult();
    bool isRecordBad = false;
    IList<TE> recordBuffer = new List<TE>();
    result.ImportArea = resultType;
    result.ProcessStarted = DateTime.Now;
    // Clean the import repository
    await importRepository.Clean();
    // Buffer flush function
    Func<Task> flushBuffer = async () =>
    {
        await importRepository.BulkImport(recordBuffer);
        recordBuffer.Clear();
    };
    // Read the csv file
    using (TextReader reader = File.OpenText(Path.Combine(importSettings.LocalImportDirectory, importFilename)))
    {
        CsvConfiguration conf = new CsvConfiguration(CultureInfo.CurrentCulture);
        conf.Delimiter = importSettings.Delimiter;
        conf.BufferSize = importSettings.ReadBufferSize;
        conf.MissingFieldFound = null;
        conf.HasHeaderRecord = true;
        conf.AllowComments = true;
        conf.IgnoreQuotes = true;
        conf.RegisterClassMap(map);
        // Flag the current record as bad and remember its raw text + position.
        conf.BadDataFound = context =>
        {
            isRecordBad = true;
            result.BadData.Add(new Tuple<string, int>(context.RawRecord, (result.RecordsImported + 1)));
        };
        // Instantiate our Csv reader (now disposed deterministically).
        using (CsvReader csv = new CsvReader(reader, conf))
        {
            // Iterate over the records and perform import
            while (await csv.ReadAsync())
            {
                // Parse the record
                var record = csv.GetRecord<TE>();
                // Only keep the record if BadDataFound did not fire for it.
                if (!isRecordBad)
                {
                    recordBuffer.Add(record);
                    result.RecordsImported++;
                    // Flush the buffer to the database if threshold is reached
                    if (recordBuffer.Count == importSettings.ReadBufferSize)
                    {
                        await flushBuffer();
                    }
                }
                // Fix: reset the flag after EVERY record. The original reset it
                // inside the `!isRecordBad` branch, so once a single bad record
                // was seen the flag stayed true and all subsequent (good)
                // records were silently skipped.
                isRecordBad = false;
            }
        }
        // Flush any remaining records
        await flushBuffer();
        // Set the result to true
        result.Success = true;
        result.ProcessEnded = DateTime.Now;
        return result;
    }
}
/// <summary>
/// Creates a new parser using the given <see cref="XLWorkbook"/> and <see cref="CsvConfiguration"/>.
/// <remarks>
/// Will attempt to read the data from the first worksheet in the workbook.
/// </remarks>
/// </summary>
/// <param name="workbook">The <see cref="XLWorkbook"/> with the data.</param>
/// <param name="configuration">The configuration.</param>
/// <param name="rowOffset">Number of leading rows to skip before parsing begins; defaults to 0.</param>
public ExcelParser(XLWorkbook workbook, CsvConfiguration configuration, int rowOffset = 0) : this(workbook.Worksheets.First(), configuration, rowOffset) { }
/// <summary>
/// Worker entry point (takes a boxed object[] of arguments): normalizes the
/// per-vendor option CSV files in a folder, merges them with data from the XML
/// price feed, builds product descriptions, diffs against the previous export
/// and writes the resulting CSV (cp1251) to the save folder, then deletes the
/// processed option files.
/// Fix in this revision: <c>to_asci</c> was called on <c>time_xml_line</c>
/// BEFORE the null check, guaranteeing a NullReferenceException whenever the
/// XML lookup missed; the call is now made only after the null guard.
/// </summary>
/// <param name="ob">object[] of: xml path, options folder, save folder, cfg_data, cfg folder, description template, key words.</param>
public void make_op(object ob)
{
    string[,] sim_to_ch = null;
    try
    {
        sim_to_ch = functions_stl.get_sim_to_ch("cfg\\Список подстановки.txt");
    }
    catch
    {
        MessageBox.Show("Не удалось загрузить Список подстановки.txt");
    }
    // Unpack the boxed argument array.
    object[] inf = ob as object[];
    string xml = Convert.ToString(inf[0]);
    string folder_options = Convert.ToString(inf[1]);
    string save = Convert.ToString(inf[2]);
    cfg_data cfg = (cfg_data)inf[3];
    string cfg_folder = Convert.ToString(inf[4]);
    List<string> tmpl_description = (List<string>)inf[5];
    string[] key_words = (string[])inf[6];
    const uint shift = 2; // column position where the synthetic columns are inserted
    settings sets = new settings(/*xml, */ folder_options, save, cfg, cfg_folder);
    List<Options> options = new List<Options>(); // options from all files
    List<int> index = new List<int>();           // list of ids extracted from parser data
    string[] files_opions = Directory.GetFiles(folder_options); // list of files
    if (files_opions.Length == 0)
    {
        return;
    }
    string name_f = "";
    string save_uload = "";
    List<string> title_tmp = new List<string>();
    List<string> title = new List<string>();
    List<string> unload = new List<string>();
    Regex get_line = new Regex("(.+)$", RegexOptions.Multiline);
    string words = "";
    StringBuilder sb = new StringBuilder();
    foreach (string file_option in files_opions)
    {
        StringBuilder normal_file = new StringBuilder();
        title_tmp.Clear();
        name_f = Path.GetFileNameWithoutExtension(file_option);
        string fileText = "";
        try
        {
            fileText = System.IO.File.ReadAllText(file_option, Encoding.Default);
        }
        catch
        {
            MessageBox.Show("Не найден фаил описания");
        }
        // -------------------------------------------- Header --------------------------------------------
        Regex sub_string = new Regex(";");
        words = get_line.Match(fileText).Groups[1].Value;
        string[] title_l = sub_string.Split(words);
        List<int> not_found_index = new List<int>(); // indices of columns not found in the header
        for (int il = 0; il < title_l.Length; il++)
        {
            bool fine = false;
            if (il == shift)
            {
                // Insert the fixed synthetic columns at position `shift`.
                /*title.Add("PROIZVODITEL"); */
                title_tmp.Add("NAME");
                title_tmp.Add("FULL_NAME");
                title_tmp.Add("EKSPORT");
                title_tmp.Add("DESCRIPTION");
                title_tmp.Add("LENGTH_PACK");
                title_tmp.Add("WIDTH_PACK");
                title_tmp.Add("HEIGHT_PACK");
                title_tmp.Add("WEIGHT_V");
                title_tmp.Add("WEIGHT");
                title_tmp.Add("DELIVERY_PACKAGE_TYPE");
                title_tmp.Add("DELIVERY_PACKAGE");
            }
            foreach (string[] it in sets.cfg.options)
            {
                if ((title_l[il] == "\"" + it[0] + "\"") || (title_l[il] == it[0]))
                {
                    fine = true;
                    title_tmp.Add(it[1]); // header built from the recognized properties
                    break;
                }
            }
            if (!fine)
            {
                not_found_index.Add(il);
                unload.Add(title_l[il]); // properties that were not recognized
            }
        }
        foreach (string s in unload)
        {
            save_uload += s + "\r\n";
        }
        int i = 0;
        foreach (string option in title_tmp)
        {
            bool is_tl = false;
            i++;
            foreach (string tl in title)
            {
                if (option == tl) { is_tl = true; break; }
            }
            if (!is_tl)
            {
                title.Add(option);
            }
            if (i == shift)
            {
                /*title.Add("PROIZVODITEL"); */
                title.Add("NAME");
                title.Add("FULL_NAME");
                title.Add("EKSPORT");
                title.Add("DESCRIPTION");
                title.Add("LENGTH_PACK");
                title.Add("WIDTH_PACK");
                title.Add("HEIGHT_PACK");
                title.Add("WEIGHT_V");
                title.Add("WEIGHT");
                title.Add("DELIVERY_PACKAGE_TYPE");
                title.Add("DELIVERY_PACKAGE");
            }
        }
        // -------------------------------------------- Header --------------------------------------------
        title = title.Distinct().ToList();
        foreach (string item in title_tmp)
        {
            normal_file.Append(item + ";");
        }
        normal_file.Append("\r\n"); // file with the corrected header
        Regex manual_get_line = new Regex("https(.*)", RegexOptions.Multiline);
        MatchCollection lines = get_line.Matches(fileText);
        StringBuilder str_bl = new StringBuilder();
        string buf;
        // ---------------------------------- Removing newline characters ---------------------------------
        // Records are recognized by starting with "https"; continuation lines
        // are glued back onto the previous record with a "__" separator.
        for (i = 1; i < lines.Count; i++)
        {
            if (i == lines.Count - 1)
            {
                str_bl.Append(lines[i] + "\r\n");
                break;
            }
            if (manual_get_line.IsMatch(lines[i + 1].Value))
            {
                str_bl.Append(lines[i] + "\r\n");
            }
            else
            {
                buf = lines[i].Value;
                buf = buf.Replace("\r\n", "");
                buf = buf.Replace("\r", "");
                buf = buf.Replace("\n", "");
                buf += "__";
                str_bl.Append(buf);
            }
        }
        // ---------------------------------- Removing newline characters ---------------------------------
        lines = get_line.Matches(str_bl.ToString());
        Regex r_celas = new Regex(";");
        string rn;
        foreach (Match line in lines)
        {
            string[] cells = r_celas.Split(line.Value);
            buf = "";
            for (i = 0; i < cells.Length; i++)
            {
                if (i == shift)
                {
                    buf += ";;;;;;;;;;;"; // placeholders for the synthetic columns
                }
                if (sets.equal(i, not_found_index)) // skip columns not found in the header
                {
                    continue;
                }
                buf += cells[i] + ";";
            }
            normal_file.Append(buf);
            rn = normal_file[normal_file.Length - 3].ToString() + normal_file[normal_file.Length - 2];
            if (rn != "\r\n")
            {
                normal_file.Append("\r\n");
            }
            normal_file.Remove(normal_file.Length - 1, 1);
        }
        /*if (save_uload != "")
         * {
         * File.WriteAllText("unload.txt", save_uload);
         * //MessageBox.Show("Не все поля заголовка были найденны.\r\nНедостающие свойства сохнаненны в файл unload.txt");
         * }*/
        //File.WriteAllText("normal_file.csv", normal_file.ToString(), Encoding.Default);
        var config = new CsvConfiguration(CultureInfo.InvariantCulture)
        {
            Delimiter = ";",
            HeaderValidated = null,
            MissingFieldFound = null,
            Encoding = Encoding.GetEncoding(1251),
            BadDataFound = data =>
            {
                //info.Add(data.RawRecord);
            },
        };
        List<Options> options_values = new List<Options>();
        StreamReader reader = new StreamReader(new MemoryStream(Encoding.GetEncoding(1251).GetBytes(normal_file.ToString())), Encoding.GetEncoding(1251));
        using (var csv = new CsvReader(reader, config))
        {
            var li = csv.GetRecords<Options>();
            options_values = li.ToList();
            options.AddRange(options_values);
            options_values = null;
        }
    }
    // Build the output header row.
    foreach (var tl in title)
    {
        sb.Append(tl + ";");
    }
    sb.Remove(sb.Length - 1, 1); // remove the trailing delimiter
    sb.Append("\r\n");
    // Remove quotes from ids.
    foreach (Options option in options)
    {
        if (option.id == "2624611")
        {
            // debugging hook
        }
    }
    foreach (Options option in options)
    {
        option.id = Regex.Replace(option.id, "\"", "");
    }
    // --------------------------- Building the index list ---------------------------
    Regex r_id = new Regex("(.*)\\/(\\d*)");
    foreach (Options option in options)
    {
        try
        {
            Match m_id = r_id.Match(option.id);
            int id = Convert.ToInt32(m_id.Groups[2].Value);
            index.Add(id);
            option.artnumber = id.ToString();
        }
        catch { } // ids without a numeric suffix are skipped
    }
    // --------------------------- Building the index list ---------------------------
    // Read the XML data, restricted to ids that exist in `options`.
    Get_xml xml_data = new Get_xml(xml, index);
    // --------------------------- Computing dimensions and weight --------------------------
    foreach (Options option in options)
    {
        option.get_abc_weight(sets.cfg.coefficients_volume_and_mass, ref xml_data.get_xml_data);
    }
    // --------------------------- Computing dimensions and weight --------------------------
    // ---- Projecting the option objects into the string buffer for the output file ----
    foreach (Options option in options)
    {
        Xml_offer time_xml_line = xml_data.get_xml_data.Find(data => data.id == option.artnumber);
        // Fix: guard BEFORE dereferencing. The original called
        // time_xml_line.to_asci(...) before this null check and crashed
        // whenever the lookup above found nothing.
        if (time_xml_line == null)
        {
            continue;
        }
        time_xml_line.to_asci(sim_to_ch);
        option.proizvoditel = time_xml_line.vendor;
        char[] se = { '_', '_' };
        // --------------------------- Building the description ---------------------------
        Dictionary<string, string> words_key = new Dictionary<string, string>();
        if (key_words == null)
        {
            key_words = new string[0];
        }
        foreach (var key in key_words)
        {
            if (functions_stl.get_property(key, option) != "" && functions_stl.get_property(key, option) != null)
            {
                words_key.Add(key, functions_stl.get_property(key, option));
            }
        }
        if (words_key.ContainsKey("SOSTAV"))
        {
            words_key["SOSTAV"] = words_key["SOSTAV"].Split(se)[0];
        }
        if (words_key.ContainsKey("SERIYA"))
        {
            words_key["SERIYA"] = words_key["SERIYA"].Split(se)[0];
        }
        if (words_key.ContainsKey("MATERIAL"))
        {
            string material = option.material != "" ? option.material : option.sostav;
            if (material == "")
            {
                material = time_xml_line.composition;
            }
            words_key["MATERIAL"] = material;
        }
        if (key_words.Contains("NAME"))
        {
            words_key["NAME"] = time_xml_line.name_short(time_xml_line.name, sets.cfg.prepositions, sets.cfg.stop_words);
        }
        if (key_words.Contains("FULL_NAME"))
        {
            words_key["FULL_NAME"] = time_xml_line.name;
        }
        if (key_words.Contains("PRICE"))
        {
            words_key["PRICE"] = Convert.ToString(time_xml_line.price * sets.get_coefficient(time_xml_line.price));
        }
        if (key_words.Contains("ID_CATEGORY"))
        {
            words_key["ID_CATEGORY"] = sets.get_name_of_category(time_xml_line.category);
        }
        option.description = classes.functions_stl.make_description(tmpl_description, words_key);
        // --------------------------- Building the description ---------------------------
        foreach (string tl in title)
        {
            string value = functions_stl.get_property(tl.ToLower(), option);
            if (tl == "WEIGHT_V_GR" && option.WEIGHT_V_GR != "")
            {
                // debugging hook
            }
            // ------------------------------------------- Dropping duplicated halves -------------------------------------------
            string[] cut_double = { "" };
            if (tl == "SERIYA" || tl == "PRICE_FOR_THE_ONE" || tl == "PRICE_FOR" || tl == "PRICE_FOR_" || tl == "SOSTAV" || tl == "SIZE_2")
            {
                //words = get_line.Match(option.get_property(tl.ToLower(), option)).Groups[1].Value;
                if (value != "")
                {
                    cut_double = value.Split(se);
                }
                sb.Append(cut_double[0] + ";");
            }
            // ------------------------------------------- Dropping duplicated halves -------------------------------------------
            else if (tl == "LENGTH_PACK" || tl == "WIDTH_PACK" || tl == "HEIGHT_PACK" || tl == "WEIGHT_V" || tl == "WEIGHT")
            {
                if (value == "0")
                {
                    sb.Append(";"); // zero dimensions are emitted as empty
                }
                else
                {
                    sb.Append(value + ";");
                }
            }
            else if (tl == "DIAMETR_PISHUSHCHEGO_UZLA_MM")
            {
                value = value.Replace(".", ","); // decimal comma for this column
                sb.Append(value + ";");
            }
            else
            {
                sb.Append(value + ";");
            }
        }
        sb.Remove(sb.Length - 1, 1); // remove the trailing delimiter
        sb.Append("\r\n");
        //string hi = nameof(option);
    }
    options.Clear();
    List<Options_stl> options_last = new List<Options_stl>();
    List<Options_stl> options_new = new List<Options_stl>();
    string[] last_artnumbers, new_artnumbers, new_id;
    string file_last_option = Directory.GetFiles(save).FirstOrDefault();
    if (file_last_option != null)
    {
        // ---------------------------------------------------- Loading the new description file -----------------------------------------------------
        var config = new CsvConfiguration(CultureInfo.InvariantCulture)
        {
            Delimiter = ";",
            HeaderValidated = null,
            MissingFieldFound = null,
            Encoding = Encoding.GetEncoding(1251),
            BadDataFound = data =>
            {
                //info.Add(data.RawRecord);
            },
        };
        using (var reader = new StringReader(sb.ToString()))
        using (var csv = new CsvReader(reader, config))
        {
            var li = csv.GetRecords<Options_stl>();
            options_new = li.ToList();
        }
        // ---------------------------------------------------- Loading the new description file -----------------------------------------------------
        // -------------------------------------------------- Loading the previous description file --------------------------------------------------
        using (var reader = new StreamReader(file_last_option, Encoding.GetEncoding(1251)))
        using (var csv = new CsvReader(reader, config))
        {
            var li = csv.GetRecords<Options_stl>();
            options_last = li.ToList();
        }
        // -------------------------------------------------- Loading the previous description file --------------------------------------------------
        new_artnumbers = Options_stl.get_artbumbers(options_new);
        last_artnumbers = Options_stl.get_artbumbers(options_last);
        new_id = new_artnumbers.Except(last_artnumbers).ToArray(); // find the new positions
        //equal = new_artnumbers.Intersect(last_artnumbers).ToArray(); // find the matches
        // Append the new items to the previous list.
        Options_stl new_op = new Options_stl();
        foreach (string st in new_id)
        {
            new_op = options_new.Find(l => l.artnumber == st);
            options_last.Add(new_op);
        }
        // ------------------------------------------------------------ Building the file ------------------------------------------------------------
        // - header
        sb.Clear();
        foreach (string item in title)
        {
            sb.Append(item + ";");
        }
        sb.Remove(sb.Length - 1, 1);
        sb.Append("\r\n");
        // - header
        string value;
        foreach (Options_stl op in options_last)
        {
            foreach (string tl in title)
            {
                value = op.get_property(tl.ToLower(), op);
                sb.Append(value + ";");
            }
            sb.Remove(sb.Length - 1, 1);
            sb.Append("\r\n");
        }
        // ------------------------------------------------------------ Building the file ------------------------------------------------------------
    }
    // Save the finished file (cp1251, named after the XML feed).
    File.WriteAllText(save + "\\" + Path.GetFileNameWithoutExtension(xml) + ".csv", sb.ToString(), Encoding.GetEncoding(1251));
    // Delete the parser's input files.
    foreach (string file_option in files_opions)
    {
        File.Delete(file_option);
    }
}
/// <summary>
/// Lazily parses each CSV file in <paramref name="filePathes"/> into a
/// <see cref="CsvDbUpdateModel"/>. Each file's reader is opened (using the
/// configuration's encoding, with BOM detection) only when its element is
/// enumerated, and disposed immediately after the model is yielded.
/// </summary>
/// <param name="filePathes">Paths of the CSV files to parse.</param>
/// <param name="csvConfiguration">CSV configuration (supplies the encoding and parse options).</param>
/// <returns>One parsed model per input file, in input order.</returns>
private static IEnumerable<CsvDbUpdateModel> ParseDocuments(IEnumerable<string> filePathes, CsvConfiguration csvConfiguration)
{
    foreach (var csvPath in filePathes)
    {
        using (var textReader = new StreamReader(csvPath, csvConfiguration.Encoding, true))
        using (var csvReader = new CsvReader(textReader, csvConfiguration))
        {
            yield return ParseCsvReader(csvReader, csvPath);
        }
    }
}
/// <summary>
/// Static constructor: creates the shared <see cref="CsvConfiguration"/> once
/// and installs the custom CSV DateTime converter before first use.
/// </summary>
static DbWebApiOptions() { _CsvConfiguration = new CsvConfiguration(); SetCsvDateTimeConverter(); }
/// <summary>
/// Creates a new CSV writer using the given <see cref="TextWriter"/>.
/// </summary>
/// <param name="writer">The <see cref="TextWriter"/> used to write the CSV file.</param>
/// <param name="configuration">The configuration.</param>
// Delegates to the serializer-based constructor; `false` leaves ownership of
// the underlying writer with the caller (it is not disposed by this instance).
public CsvWriter(TextWriter writer, CsvConfiguration configuration) : this(new CsvSerializer(writer, configuration, false)) { }
// Verifies that with string.Empty registered as a null value, empty fields are
// read back as null for both Int32? and string, by index and by header name.
public void TwoColumnCsvWithHeadersAndPresentAndNullDataRowTest()
{
    var config = new CsvConfiguration(CultureInfo.InvariantCulture)
    {
        IgnoreBlankLines = false,
    };
    // Header row followed by four data rows: ("1", missing), ("", "Foo"), ("", ""), ("4", "Bar").
    var parser = new ParserMock(config)
    {
        { "NullableInt32Field", "NullableStringField" },
        { "1" },
        { "", "Foo" },
        { "", "" },
        { "4", "Bar" },
    };
    using (var csv = new CsvReader(parser))
    {
        csv.Context.TypeConverterOptionsCache.GetOptions<string>().NullValues.Add(string.Empty); // Read empty fields as nulls instead of `""`.
        // Read header row, assert header row columns:
        Assert.True(csv.Read());
        Assert.True(csv.ReadHeader());
        Assert.Equal(2, csv.HeaderRecord.Length);
        Assert.Equal("NullableInt32Field", csv.HeaderRecord[0]);
        Assert.Equal("NullableStringField", csv.HeaderRecord[1]);
        // Read first data row:
        Assert.True(csv.Read());
        // Read `Int32?`, assert "1" value:
        var nullableIntValueByIndex = csv.GetField<int?>(0);
        var nullableIntValueByName = csv.GetField<int?>("NullableInt32Field");
        Assert.True(nullableIntValueByIndex.HasValue);
        Assert.True(nullableIntValueByName.HasValue);
        Assert.Equal(1, nullableIntValueByIndex);
        Assert.Equal(1, nullableIntValueByName);
        // Read nullable String, assert null value (second column is missing entirely):
        var strByIndex = csv.GetField<string>(1);
        var strByName = csv.GetField<string>("NullableStringField");
        Assert.Null(strByIndex);
        Assert.Null(strByName);
        // Read second data row:
        Assert.True(csv.Read());
        // Read `Int32?`, assert NULL value:
        nullableIntValueByIndex = csv.GetField<int?>(0);
        nullableIntValueByName = csv.GetField<int?>("NullableInt32Field");
        Assert.False(nullableIntValueByIndex.HasValue);
        Assert.False(nullableIntValueByName.HasValue);
        // Read nullable String, assert "Foo" value:
        strByIndex = csv.GetField<string>(1);
        strByName = csv.GetField<string>("NullableStringField");
        Assert.Equal("Foo", strByIndex);
        Assert.Equal("Foo", strByName);
        // Read third data row:
        Assert.True(csv.Read());
        // Read `Int32?`, assert NULL value:
        nullableIntValueByIndex = csv.GetField<int?>(0);
        nullableIntValueByName = csv.GetField<int?>("NullableInt32Field");
        Assert.False(nullableIntValueByIndex.HasValue);
        Assert.False(nullableIntValueByName.HasValue);
        // Read nullable String, assert null value (row is "",""):
        strByIndex = csv.GetField<string>(1);
        strByName = csv.GetField<string>("NullableStringField");
        Assert.Null(strByIndex);
        Assert.Null(strByName);
        // Read fourth data row:
        Assert.True(csv.Read());
        // Read `Int32?`, assert "4" value:
        nullableIntValueByIndex = csv.GetField<int?>(0);
        nullableIntValueByName = csv.GetField<int?>("NullableInt32Field");
        Assert.True(nullableIntValueByIndex.HasValue);
        Assert.True(nullableIntValueByName.HasValue);
        Assert.Equal(4, nullableIntValueByIndex);
        Assert.Equal(4, nullableIntValueByName);
        // Read nullable String, assert "Bar" value:
        strByIndex = csv.GetField<string>(1);
        strByName = csv.GetField<string>("NullableStringField");
        Assert.Equal("Bar", strByIndex);
        Assert.Equal("Bar", strByName);
        // Read to end of file:
        Assert.False(csv.Read());
    }
}
/// <summary>
/// Serializes the event-log list to the requested textual format:
/// "txt" (one log per line via ToString), "json" (Json.NET), or "csv"
/// (CsvHelper with a hand-written header line). Unknown formats, a null/empty
/// list, or an unmatched case return string.Empty; a null/empty format
/// defaults to "json".
/// </summary>
/// <param name="logs">Event log entries to serialize.</param>
/// <param name="format">"txt", "json" or "csv" (case-insensitive).</param>
/// <returns>The serialized text, or string.Empty when there is nothing to serialize.</returns>
public static string ToFormat(IList<EventLogData> logs, string format)
{
    string data = string.Empty;
    //string statsOutput = string.Empty;
    if (string.IsNullOrEmpty(format))
    {
        format = "json"; // default format
    }
    if (logs != null && logs.Any())
    {
        switch (format.ToLower(CultureInfo.CurrentCulture))
        {
            case "txt":
                StringBuilder txtBuilder = new StringBuilder(string.Empty);
                foreach (EventLogData logData in logs)
                {
                    txtBuilder.AppendFormat("{0}{1}", logData, Environment.NewLine);
                }
                data = txtBuilder.ToString();
                break;
            case "json":
                JsonSerializerSettings settings = new JsonSerializerSettings
                {
                    MaxDepth = int.MaxValue,
                    Formatting = Formatting.None
                };
                data = JsonConvert.SerializeObject(logs, settings);
                break;
            case "csv":
                CsvConfiguration config = new CsvConfiguration
                {
                    AllowComments = false,
                    DetectColumnCountChanges = true,
                    IgnoreQuotes = true,
                    QuoteAllFields = true,
                    TrimFields = true
                };
                StringBuilder csvBuilder = new StringBuilder(logs.Count);
                using (StringWriter sw = new StringWriter(csvBuilder, CultureInfo.InvariantCulture))
                {
                    CsvWriter csvWriter = new CsvWriter(sw, config);
                    foreach (EventLogData log in logs)
                    {
                        csvWriter.WriteField<string>(log.Name);
                        csvWriter.WriteField<string>(log.Provider);
                        csvWriter.WriteField<long>(log.MaximumSize);
                        csvWriter.WriteField<string>(log.Retention);
                        csvWriter.WriteField<bool>(log.IsEnabled);
                        csvWriter.WriteField<bool>(log.IsClassic);
                        csvWriter.WriteField<string>(log.LogType);
                        csvWriter.WriteField<string>(log.Isolation);
                        csvWriter.WriteField<Guid>(log.DebugGuid);
                        csvWriter.WriteField<int>(log.Providers.Count); // no header due to type mismatch for property
                        csvWriter.WriteField<string>(log.FilePath);
                        //csvWriter.WriteField<string>(log.SecurityDescriptor.GetSddlForm(AccessControlSections.All));
                        csvWriter.WriteField<string>(log.SecurityDescriptor);
                        csvWriter.NextRecord();
                    }
                    sw.Flush();
                }
                // Header is prepended manually (column order must match the
                // WriteField sequence above).
                csvBuilder.Insert(0, "\"Name\",\"Provider\",\"MaximumSize\",\"Retention\",\"IsEnabled\",\"IsClassic\",\"LogType\",\"Isolation\",\"DebugGuid\",\"Providers\",\"FilePath\",\"SecurityDescriptor\"" + Environment.NewLine);
                data = csvBuilder.ToString();
                break;
            default:
                // Unknown format: fall through and return string.Empty.
                break;
        }
    }
    return(data);
}
/// <summary>
/// Builds three CSV files ("grunntyper", "kartleggingsenheter",
/// "miljovariabler") by walking the nature-type hierarchy for the configured
/// version, zips them together and returns the zip as a MemoryStream.
/// Returns null when the root Natursystem for the version is not found.
/// </summary>
public MemoryStream GenerateStream()
{
    var streams = new Dictionary<string, MemoryStream>();
    var writers = new Dictionary<string, CsvWriter>();
    // Norwegian culture controls number/date formatting in the CSVs.
    var csvConfiguration = new CsvConfiguration(new CultureInfo("nb-NO"));
    //var csvConfiguration = new CsvConfiguration(CultureInfo.InvariantCulture);
    var streamNames = new[] { "grunntyper", "kartleggingsenheter", "miljovariabler" };
    // One in-memory CSV (with its composite header row) per output file.
    foreach (var name in streamNames)
    {
        var memoryStream = new MemoryStream();
        var streamWriter = new StreamWriter(memoryStream, Encoding.UTF8) { AutoFlush = true };
        var csvWriter = CreateCsvWriter(streamWriter, csvConfiguration);
        // Common header columns shared by all three files...
        csvWriter.WriteHeader<Natursystem>();
        csvWriter.WriteHeader<Hovedtypegruppe>();
        csvWriter.WriteHeader<Hovedtype>();
        // ...plus file-specific columns.
        switch (name)
        {
            case "grunntyper":
                csvWriter.WriteHeader<Grunntype>();
                break;
            case "kartleggingsenheter":
                csvWriter.WriteHeader<Kartleggingsenhet>();
                break;
            case "miljovariabler":
                csvWriter.WriteHeader<Miljovariabel>();
                csvWriter.WriteHeader<Trinn>();
                csvWriter.WriteHeader<Basistrinn>();
                break;
        }
        csvWriter.NextRecord();
        streams.Add(name, memoryStream);
        writers.Add(name, csvWriter);
    }
    // Root of the hierarchy for the requested version.
    var na = _context.Natursystem
        .Include(x => x.Version)
        .Include(x => x.Kode)
        .Include(x => x.UnderordnetKoder)
        .FirstOrDefault(x => x.Version.Navn == _version);
    if (na == null)
    {
        return(null);
    }
    foreach (var hovedtypegruppe in na.UnderordnetKoder)
    {
        var htg = _context.Hovedtypegruppe
            .Include(x => x.Kode)
            .Include(x => x.UnderordnetKoder)
            .FirstOrDefault(x => x.Id == hovedtypegruppe.Id);
        if (htg == null)
        {
            continue;
        }
        foreach (var hovedtype in htg.UnderordnetKoder)
        {
            var ht = _context.Hovedtype
                .Include(x => x.Kode)
                .Include(x => x.UnderordnetKoder)
                .Include(x => x.Kartleggingsenheter)
                .Include(x => x.Miljovariabler)
                .FirstOrDefault(x => x.Id == hovedtype.Id);
            if (ht == null)
            {
                continue;
            }
            // One row per Grunntype under this Hovedtype.
            foreach (var grunntype in ht.UnderordnetKoder)
            {
                var gt = _context.Grunntype
                    .Include(x => x.Kode)
                    .FirstOrDefault(x => x.Id == grunntype.Id);
                if (gt == null)
                {
                    continue;
                }
                writers["grunntyper"].WriteRecord(na);
                writers["grunntyper"].WriteRecord(htg);
                writers["grunntyper"].WriteRecord(ht);
                writers["grunntyper"].WriteRecord(gt);
                writers["grunntyper"].NextRecord();
            }
            // One row per Kartleggingsenhet.
            foreach (var kartleggingsenhet in ht.Kartleggingsenheter)
            {
                var k = _context.Kartleggingsenhet
                    .Include(x => x.Kode)
                    .FirstOrDefault(x => x.Id == kartleggingsenhet.Id);
                if (k == null)
                {
                    continue;
                }
                writers["kartleggingsenheter"].WriteRecord(na);
                writers["kartleggingsenheter"].WriteRecord(htg);
                writers["kartleggingsenheter"].WriteRecord(ht);
                writers["kartleggingsenheter"].WriteRecord(k);
                writers["kartleggingsenheter"].NextRecord();
            }
            // One row per Miljovariabel/Trinn/Basistrinn combination.
            foreach (var miljovariabel in ht.Miljovariabler)
            {
                var m = _context.Miljovariabel
                    .Include(x => x.Kode)
                    .Include(x => x.Trinn)
                    .FirstOrDefault(x => x.Id == miljovariabel.Id);
                if (m == null)
                {
                    continue;
                }
                foreach (var trinn in miljovariabel.Trinn)
                {
                    var t = _context.Trinn
                        .Include(x => x.Kode)
                        .Include(x => x.Basistrinn)
                        .FirstOrDefault(x => x.Id == trinn.Id);
                    if (t == null)
                    {
                        continue;
                    }
                    foreach (var basistrinn in t.Basistrinn)
                    {
                        var bt = _context.Basistrinn
                            //.Include(x => x.Kode)
                            .FirstOrDefault(x => x.Id == basistrinn.Id);
                        if (bt == null)
                        {
                            continue;
                        }
                        writers["miljovariabler"].WriteRecord(na);
                        writers["miljovariabler"].WriteRecord(htg);
                        writers["miljovariabler"].WriteRecord(ht);
                        writers["miljovariabler"].WriteRecord(m);
                        writers["miljovariabler"].WriteRecord(t);
                        writers["miljovariabler"].WriteRecord(bt);
                        writers["miljovariabler"].NextRecord();
                    }
                }
            }
        }
    }
    // Bundle the three CSV streams into one zip in memory.
    var zipFile = new ZipFile();
    var outputStream = new MemoryStream();
    foreach (var (key, stream) in streams)
    {
        stream.Seek(0, SeekOrigin.Begin); // rewind before zipping
        zipFile.AddEntry($"{key}_v{_version}.csv", stream);
    }
    zipFile.Save(outputStream);
    return(outputStream);
}
/// <summary>
/// Make an HTTP request against the API and deserialize the response.
/// JSON responses become a <c>JObject</c> (HTTP errors are mapped to typed
/// exceptions); "text/csv" responses become a list of <c>CsvInstrument</c>.
/// </summary>
/// <param name="Route">Key into <c>_routes</c>; "{name}" placeholders are filled from <paramref name="Params"/>.</param>
/// <param name="Method">HTTP method ("GET", "POST", "PUT", ...).</param>
/// <param name="Params">Additional parameters; placeholder values are consumed, the rest go to the query string or form body.</param>
/// <returns>Varies according to API endpoint.</returns>
/// <exception cref="GeneralException">And its siblings, per the "error_type" field of a JSON error body.</exception>
/// <exception cref="DataException">On an unexpected content type.</exception>
private dynamic Request(string Route, string Method, Dictionary <string, dynamic> Params = null)
{
    string url = _root + _routes[Route];

    if (Params == null)
    {
        Params = new Dictionary <string, dynamic>();
    }

    // Substitute "{key}" placeholders in the route; substituted parameters are
    // removed so they are not sent again in the query string / body.
    // (Iterate over a copy because Params is mutated inside the loop.)
    if (url.Contains("{"))
    {
        var urlparams = Params.ToDictionary(entry => entry.Key, entry => entry.Value);
        foreach (KeyValuePair <string, dynamic> item in urlparams)
        {
            if (url.Contains("{" + item.Key + "}"))
            {
                url = url.Replace("{" + item.Key + "}", (string)item.Value);
                Params.Remove(item.Key);
            }
        }
    }

    //if (!Params.ContainsKey("api_key"))
    //    Params.Add("api_key", _apiKey);
    //if (!Params.ContainsKey("access_token") && !String.IsNullOrEmpty(_accessToken))
    //    Params.Add("access_token", _accessToken);

    HttpWebRequest request;
    string paramString = String.Join("&", Params.Select(x => Utils.BuildParam(x.Key, x.Value)));

    if (Method == "POST" || Method == "PUT")
    {
        // Parameters travel as a form-urlencoded body.
        request = (HttpWebRequest)WebRequest.Create(url);
        request.AllowAutoRedirect = true;
        request.Method = Method;
        request.ContentType = "application/x-www-form-urlencoded";
        // FIX: ContentLength must be the byte count of the body, not the char count
        // (StreamWriter encodes as UTF-8, which differs from Length for non-ASCII).
        request.ContentLength = Encoding.UTF8.GetByteCount(paramString);
        AddExtraHeaders(ref request);
        using (Stream webStream = request.GetRequestStream())
        using (StreamWriter requestWriter = new StreamWriter(webStream))
            requestWriter.Write(paramString);
    }
    else
    {
        // Parameters travel in the query string.
        request = (HttpWebRequest)WebRequest.Create(url + "?" + paramString);
        request.AllowAutoRedirect = true;
        request.Method = Method;
        AddExtraHeaders(ref request);
    }

    WebResponse webResponse;
    try
    {
        webResponse = request.GetResponse();
    }
    catch (WebException e)
    {
        // FIX: was "e.Response.Equals(null)", which itself throws
        // NullReferenceException exactly when Response IS null (timeouts, DNS
        // failures). A plain null check rethrows those; HTTP error responses
        // (4xx/5xx) carry a body and are parsed below instead.
        if (e.Response == null)
        {
            throw;
        }

        webResponse = e.Response;
    }

    using (Stream webStream = webResponse.GetResponseStream())
    {
        using (StreamReader responseReader = new StreamReader(webStream))
        {
            string response = responseReader.ReadToEnd();
            HttpStatusCode status = ((HttpWebResponse)webResponse).StatusCode;

            if (webResponse.ContentType == "application/json")
            {
                JObject responseDictionary = Utils.JsonDeserialize(response);
                if (status != HttpStatusCode.OK)
                {
                    // Map the server's "error_type" discriminator to a typed exception.
                    string errorType = "GeneralException";
                    string message = "";
                    if (responseDictionary["error_type"] != null)
                    {
                        errorType = (string)responseDictionary["error_type"];
                    }
                    if (responseDictionary["message"] != null)
                    {
                        message = (string)responseDictionary["message"];
                    }
                    switch (errorType)
                    {
                        case "GeneralException": throw new GeneralException(message, status);
                        case "TokenException":
                        {
                            // Give the host app a chance to re-authenticate before failing.
                            _sessionHook?.Invoke();
                            throw new TokenException(message, status);
                        }
                        case "PermissionException": throw new PermissionException(message, status);
                        case "OrderException": throw new OrderException(message, status);
                        case "InputException": throw new InputException(message, status);
                        case "DataException": throw new DataException(message, status);
                        case "NetworkException": throw new NetworkException(message, status);
                        default: throw new GeneralException(message, status);
                    }
                }
                return(responseDictionary);
            }
            else if (webResponse.ContentType == "text/csv")
            {
                //return Utils.ParseCSV(response);
                CsvConfiguration configuration = new CsvConfiguration(CultureInfo.InvariantCulture)
                {
                    HasHeaderRecord = true,
                };
                var reader = new StringReader(response);
                var csv = new CsvReader(reader, configuration);
                var csvRecords = csv.GetRecords <CsvInstrument>().ToList();
                return(csvRecords);
            }
            else
            {
                throw new DataException("Unexpected content type " + webResponse.ContentType + " " + response);
            }
        }
    }
}
public void SingleColumnCsvWithHeadersAndPresentAndNullDataRowTest() { var config = new CsvConfiguration(CultureInfo.InvariantCulture) { IgnoreBlankLines = false, }; var parser = new ParserMock(config) { { "NullableInt32Field" }, { "1" }, { new string[0] }, { "3" }, }; using (var csv = new CsvReader(parser)) { csv.Context.TypeConverterOptionsCache.GetOptions <int?>().NullValues.Add(string.Empty); // Read header row, assert header row columns: Assert.True(csv.Read()); Assert.True(csv.ReadHeader()); Assert.Single(csv.HeaderRecord); Assert.Equal("NullableInt32Field", csv.HeaderRecord[0]); // Read first data row, assert "1" value: Assert.True(csv.Read()); var nullableIntValueByIndex = csv.GetField <int?>(0); var nullableIntValueByName = csv.GetField <int?>("NullableInt32Field"); Assert.True(nullableIntValueByIndex.HasValue); Assert.True(nullableIntValueByName.HasValue); Assert.Equal(1, nullableIntValueByIndex); Assert.Equal(1, nullableIntValueByName); // Read second data row, assert null value: Assert.True(csv.Read()); nullableIntValueByIndex = csv.GetField <int?>(0); nullableIntValueByName = csv.GetField <int?>("NullableInt32Field"); Assert.False(nullableIntValueByIndex.HasValue); Assert.False(nullableIntValueByName.HasValue); // Read third data row, assert "3" value: Assert.True(csv.Read()); nullableIntValueByIndex = csv.GetField <int?>(0); nullableIntValueByName = csv.GetField <int?>("NullableInt32Field"); Assert.True(nullableIntValueByIndex.HasValue); Assert.True(nullableIntValueByName.HasValue); Assert.Equal(3, nullableIntValueByIndex); Assert.Equal(3, nullableIntValueByName); // Read to end of file: Assert.False(csv.Read()); } }
internal static IEnumerable <CsvDbUpdateModel> Process(IList <string> filePathes, CsvConfiguration csvConfiguration) { Ensure.Argument.NotNull(csvConfiguration, nameof(csvConfiguration)); Program.Logger.Debug($"Begin parsing documents: {filePathes.ToJsonLog()}"); var orderedFilePathes = new List <string>(); foreach (var path in filePathes) { if (!File.Exists(path) && !Directory.Exists(path)) { throw new FileNotFoundException("Неправильно указан путь к файлам записанных действий: " + path); } if ((File.GetAttributes(path) & FileAttributes.Directory) == FileAttributes.Directory) { orderedFilePathes.AddRange(GetOrderedDirectoryFilePathes(path)); } else { orderedFilePathes.Add(path); } } Program.Logger.Info($"Total files will be processed: {orderedFilePathes.Count}."); Program.Logger.Debug($"Documents will be processed in next order: {orderedFilePathes.ToJsonLog()}"); return(ParseDocuments(orderedFilePathes, csvConfiguration)); }
/// <summary>
/// Imports cutting-order feedback from an uploaded file — .csv rows become
/// "White" records, .xlsx rows become "Blue" records — then validates them and
/// books the finished orders.
/// </summary>
/// <param name="forceFile">The uploaded .csv or .xlsx file; required.</param>
/// <returns>The result view; on validation warnings, the warning records are the model.</returns>
/// <exception cref="Exception">When no file was uploaded.</exception>
public ActionResult ImportForceRecord(HttpPostedFileBase forceFile)
{
    // Both formats carry a fixed-size preamble before the data rows.
    int csvStartFromLine = 16;
    int excelStartFromLine = 9;

    if (forceFile == null)
    {
        throw new Exception("No file is uploaded to system");
    }

    // Persist the upload under ~/TmpFile with a timestamped, collision-safe name.
    var appData = Server.MapPath("~/TmpFile/");
    var filename = Path.Combine(appData, DateTime.Now.ToString("yyyyMMddHHmmss") + "_" + Path.GetFileName(forceFile.FileName));
    forceFile.SaveAs(filename);

    string ex = Path.GetExtension(filename);
    List <CuttingOrderImportModel> records = new List <CuttingOrderImportModel>();
    if (ex.Equals(".csv"))
    {
        // FIX: removed a fully-configured CsvConfiguration (delimiter, class map,
        // trimming, ...) that was never handed to any CSV reader — dead code; the
        // rows are parsed manually below.
        // NOTE(review): naive Split on the delimiter mis-parses quoted fields that
        // contain the delimiter; acceptable only if the source never quotes fields.
        using (TextReader treader = System.IO.File.OpenText(filename))
        {
            for (int i = 0; true; i++)
            {
                string s = treader.ReadLine();
                // Skip the preamble; stop at the first blank line after it.
                if (i >= csvStartFromLine)
                {
                    if (string.IsNullOrWhiteSpace(s))
                    {
                        break;
                    }
                    string[] fields = s.Split(char.Parse(Settings.Default.csvDelimiter));
                    records.Add(new CuttingOrderImportModel()
                    {
                        type = CuttingOrderImportModelType.White,
                        Date = fields[0],
                        Time = fields[1],
                        CuttingOrder = fields[2],
                        CuttingPosition = fields[3],
                        SingleResource = fields[4],
                        ResourceGroup = fields[5],
                        StaffNumber = fields[6],
                        WireNumber = fields[7],
                        PartNumber = fields[8],
                        KanbanNumber = fields[9],
                        CutQtyDisplay = fields[10]
                    });
                }
            }
        }
    }
    else if (Path.GetExtension(filename).Equals(".xlsx"))
    {
        FileInfo file = new FileInfo(filename);
        using (ExcelPackage ep = new ExcelPackage(file))
        {
            ExcelWorksheet ws = ep.Workbook.Worksheets.First();
            for (int i = excelStartFromLine; i <= ws.Dimension.End.Row; i++)
            {
                // Column 17 holds the feedback quantity; only rows with a positive
                // feedback are imported. (FIX: removed an unused duplicate read of
                // this cell into a dead local.)
                int feedback = int.Parse(ws.Cells[i, 17].Value.ToString());
                if (feedback > 0)
                {
                    records.Add(new CuttingOrderImportModel()
                    {
                        type = CuttingOrderImportModelType.Blue,
                        Date = ws.Cells[i, 1].Value.ToString(),
                        Time = ws.Cells[i, 2].Value.ToString(),
                        ResourceGroup = ws.Cells[i, 10].Value.ToString(),
                        PartNumber = ws.Cells[i, 11].Value.ToString(),
                        KanbanNumber = ws.Cells[i, 13].Value.ToString(),
                        CutQtyDisplay = ws.Cells[i, 17].Value.ToString()
                    });
                }
            }
        }
    }

    // Map the imported rows to batch finish-order records for validation/booking.
    bool success = true;
    List <BatchFinishOrderRecord> vr = new List <BatchFinishOrderRecord>();
    foreach (CuttingOrderImportModel r in records)
    {
        vr.Add(new BatchFinishOrderRecord()
        {
            Id = r.Id,
            FixOrderNr = r.KanbanNumber,
            PartNr = r.PartNumber,
            Amount = r.CutQty,
            ProdTime = r.CutDateTime
        });
    }

    if (vr.Count > 0)
    {
        IProcessOrderService ps = new ProcessOrderService(Settings.Default.db);
        Hashtable results = ps.ValidateFinishOrder(vr);
        // Warnings block the import unless explicitly configured to be ignored.
        success = Settings.Default.ignoreImportKBOrderError ? true : !results.ContainsKey("WARN");
        ViewBag.Success = success;
        if (success)
        {
            ps.BatchFinishOrder(results["SUCCESS"] as List <BatchFinishOrderRecord>, true, false);
            ViewBag.Msg = "Finish Success!";
            return(View());
        }
        else
        {
            ViewBag.Msg = "Validate Warning!";
            return(View(results["WARN"] as List <BatchFinishOrderRecord>));
        }
    }
    else
    {
        ViewBag.Msg = "No Record";
        return(View());
    }
}
/// <summary>
/// Reads customers from a semicolon-delimited, ISO-8859-1 encoded CSV file and
/// inserts them row-by-row into the Customers2 table with a parameterized INSERT.
/// </summary>
static void Main(string[] args)
{
    List <CustomersModel> clientes = new List <CustomersModel>();
    string path = @"C:\Users\je\Desktop\pruebaInercya\pruebaincercya\Customers\Customers\Customers.csv";

    // FIX: removed a no-op "catch (Exception) { throw; }" wrapper and a second
    // list ("productos") that merely aliased "clientes" — records are now read
    // straight into the target list.
    var config = new CsvConfiguration(CultureInfo.InvariantCulture)
    {
        Delimiter = ";",
        Encoding = Encoding.GetEncoding("ISO-8859-1")
    };
    using (var reader = new StreamReader(path, Encoding.GetEncoding("ISO-8859-1")))
    using (var csv = new CsvReader(reader, config))
    {
        foreach (var r in csv.GetRecords <CustomersModel>())
        {
            clientes.Add(new CustomersModel
            {
                Id = r.Id,
                Name = r.Name,
                Address = r.Address,
                City = r.City,
                Country = r.Country,
                PostalCode = r.PostalCode,
                Phone = r.Phone
            });
        }
    }

    try
    {
        SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder();
        builder.DataSource = "DESKTOP-64C2FJI";
        builder.IntegratedSecurity = true;
        builder.InitialCatalog = "TiendaGames";

        using (SqlConnection connection = new SqlConnection(builder.ConnectionString))
        {
            Console.WriteLine("\nQuery data example:");
            Console.WriteLine("=========================================\n");
            connection.Open();

            // Parameterized insert: one prepared command, re-bound per row.
            String sql = @"INSERT INTO Customers2 (Id ,Name ,Address ,City ,Country ,PostalCode ,Phone) VALUES (@Id ,@Name ,@Address ,@City ,@Country ,@PostalCode ,@Phone)";
            using (SqlCommand command = new SqlCommand(sql, connection))
            {
                command.CommandType = CommandType.Text;
                command.Parameters.Add("@Id", SqlDbType.VarChar);
                command.Parameters.Add("@Name", SqlDbType.NVarChar);
                command.Parameters.Add("@Address", SqlDbType.NVarChar);
                command.Parameters.Add("@City", SqlDbType.NVarChar);
                command.Parameters.Add("@Country", SqlDbType.NVarChar);
                command.Parameters.Add("@PostalCode", SqlDbType.NVarChar);
                command.Parameters.Add("@Phone", SqlDbType.NVarChar);

                foreach (var item in clientes)
                {
                    command.Parameters[0].Value = item.Id;
                    command.Parameters[1].Value = item.Name;
                    command.Parameters[2].Value = item.Address;
                    command.Parameters[3].Value = item.City;
                    command.Parameters[4].Value = item.Country;
                    command.Parameters[5].Value = item.PostalCode;
                    command.Parameters[6].Value = item.Phone;
                    command.ExecuteNonQuery();
                }
                // FIX: removed a redundant connection.Close(); the using block
                // closes and disposes the connection.
            }
        }
    }
    catch (SqlException e)
    {
        Console.WriteLine(e.ToString());
    }

    Console.WriteLine("\nDone. Press enter.");
    Console.ReadLine();
}
public LeagueMap(CsvConfiguration configuration) { AutoMap(configuration); }
/// <summary>
/// Serializes event source data to the requested textual format.
/// </summary>
/// <param name="sources">Event sources to serialize; null or empty yields string.Empty.</param>
/// <param name="format">"txt", "json" (the default when null/empty) or "csv"; any other value yields string.Empty.</param>
/// <returns>The serialized text, or string.Empty.</returns>
public static string ToFormat(IList <EventSourceData> sources, string format)
{
    string data = string.Empty;
    if (string.IsNullOrEmpty(format))
    {
        format = "json";
    }
    if (sources != null && sources.Count > 0)
    {
        // FIX: invariant lower-casing — the format keyword is a protocol token,
        // not user-visible text, so matching must not depend on the current
        // culture (e.g. the Turkish dotless-i). Identical output for the
        // supported keywords, robust everywhere else.
        switch (format.ToLowerInvariant())
        {
            case "txt":
                // One line per source via its ToString().
                StringBuilder txtBuilder = new StringBuilder(string.Empty);
                foreach (EventSourceData source in sources)
                {
                    txtBuilder.AppendFormat("{0}{1}", source, Environment.NewLine);
                }
                data = txtBuilder.ToString();
                break;
            case "json":
                JsonSerializerSettings settings = new JsonSerializerSettings
                {
                    MaxDepth = int.MaxValue,
                    Formatting = Formatting.Indented
                };
                data = JsonConvert.SerializeObject(sources, settings);
                break;
            case "csv":
                CsvConfiguration config = new CsvConfiguration
                {
                    AllowComments = false,
                    DetectColumnCountChanges = true,
                    IgnoreQuotes = true,
                    QuoteAllFields = true,
                    TrimFields = true
                };
                StringBuilder csvBuilder = new StringBuilder(sources.Count);
                using (StringWriter sw = new StringWriter(csvBuilder, CultureInfo.InvariantCulture))
                {
                    // FIX: dispose the CsvWriter so buffered output is flushed
                    // deterministically (it owns sw; re-disposing a StringWriter
                    // in the outer using is harmless). Replaces the manual Flush.
                    using (CsvWriter csvWriter = new CsvWriter(sw, config))
                    {
                        foreach (EventSourceData source in sources)
                        {
                            csvWriter.WriteField <string>(source.Name);
                            csvWriter.WriteField <string>(source.LogName);
                            // Message files joined as "a; b; c" with no trailing separator.
                            csvWriter.WriteField <string>(string.Join("; ", source.EventMessageFiles.ToArray().Select(f => f.FileName).ToArray()).Trim().TrimEnd(';'));
                            csvWriter.WriteField <string>(source.CategoryMessageFile);
                            csvWriter.WriteField <string>(source.ParameterMessageFile);
                            csvWriter.WriteField <Guid>(source.ProviderGuid);
                            // long.MaxValue / int.MaxValue act as "not set" sentinels.
                            csvWriter.WriteField <string>((source.EventLevels.Bitmask != long.MaxValue ? source.EventLevels.ToString() : ""));
                            csvWriter.WriteField <int>((source.CategoryCount != int.MaxValue ? source.CategoryCount : 0));
                            csvWriter.NextRecord();
                        }
                    }
                }
                // Prepend the header row after writing the data rows.
                csvBuilder.Insert(0, "\"Name\",\"EventLog\",\"EventMessageFiles\",\"CategoryMessageFile\",\"ParameterMessageFile\",\"ProviderGuid\",\"EventLevels\",\"CategoryCount\"" + Environment.NewLine);
                data = csvBuilder.ToString();
                break;
            default:
                break;
        }
    }
    return(data);
}