// Rows that consist only of delimiters should parse into empty fields,
// and reading past the last record should return null.
public void AllFieldsEmptyTest()
{
    using( var stream = new MemoryStream() )
    using( var reader = new StreamReader( stream ) )
    using( var writer = new StreamWriter( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        writer.WriteLine( ";;;;" );
        writer.WriteLine( ";;;;" );
        writer.Flush();
        stream.Position = 0;

        parser.Configuration.Delimiter = ";;";
        parser.Configuration.HasHeaderRecord = false;

        // Both rows contain three empty fields.
        for( var rowIndex = 0; rowIndex < 2; rowIndex++ )
        {
            var record = parser.Read();
            Assert.IsNotNull( record );
            Assert.AreEqual( 3, record.Length );
            Assert.AreEqual( "", record[0] );
            Assert.AreEqual( "", record[1] );
            Assert.AreEqual( "", record[2] );
        }

        Assert.IsNull( parser.Read() );
    }
}
// A two-character delimiter ("``") should split records into the same
// fields a single-character delimiter would.
public void MultipleCharDelimiter2Test()
{
    using( var stream = new MemoryStream() )
    using( var reader = new StreamReader( stream ) )
    using( var writer = new StreamWriter( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        writer.WriteLine( "1``2``3" );
        writer.WriteLine( "4``5``6" );
        writer.Flush();
        stream.Position = 0;

        parser.Configuration.Delimiter = "``";
        parser.Configuration.HasHeaderRecord = false;

        var expectedRows = new[]
        {
            new[] { "1", "2", "3" },
            new[] { "4", "5", "6" }
        };

        foreach( var expected in expectedRows )
        {
            var record = parser.Read();
            Assert.IsNotNull( record );
            Assert.AreEqual( 3, record.Length );
            Assert.AreEqual( expected[0], record[0] );
            Assert.AreEqual( expected[1], record[1] );
            Assert.AreEqual( expected[2], record[2] );
        }

        Assert.IsNull( parser.Read() );
    }
}
// The BadDataCallback should fire with the offending field for malformed
// rows and stay silent for well-formed ones.
public void CallbackTest()
{
    using( var stream = new MemoryStream() )
    using( var reader = new StreamReader( stream ) )
    using( var writer = new StreamWriter( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        writer.WriteLine( " a\"bc\",d" );
        writer.WriteLine( "\"a\"\"b\"c \" ,d" );
        writer.WriteLine( "\"a\"\"b\",c" );
        writer.Flush();
        stream.Position = 0;

        string badField = null;
        parser.Configuration.BadDataCallback = f => badField = f;

        // Row 1: unquoted field containing quotes -> callback fires.
        parser.Read();
        Assert.IsNotNull( badField );
        Assert.AreEqual( " a\"bc\"", badField );

        // Row 2: text after the closing quote -> callback fires again.
        badField = null;
        parser.Read();
        Assert.IsNotNull( badField );
        Assert.AreEqual( "a\"bc \" ", badField );

        // Row 3: valid -> callback does not fire.
        badField = null;
        parser.Read();
        Assert.IsNull( badField );
    }
}
// With a tiny buffer (4 bytes) a closing quote can land at the start of a
// refilled buffer; byte counting must still be accurate.
public void ByteCountTestWithQuotedFieldsClosingQuoteAtStartOfBuffer()
{
    var config = new Configuration.CsvConfiguration()
    {
        CountBytes = true,
        BufferSize = 4
    };

    using( var stream = new MemoryStream() )
    using( var writer = new StreamWriter( stream ) )
    using( var reader = new StreamReader( stream ) )
    using( var parser = new CsvParser( reader, config ) )
    {
        writer.Write( "1,\"2\",3\r\n" );
        writer.Write( "\"4\",5,\"6\"\r\n" );
        writer.Flush();
        stream.Position = 0;

        parser.Read();
        Assert.AreEqual( 9, parser.BytePosition );

        parser.Read();
        Assert.AreEqual( 20, parser.BytePosition );

        Assert.IsNull( parser.Read() );
    }
}
// Shares a single CsvParser instance across both parser fields.
public void Setup()
{
    var csvParser = new CsvParser();
    _singleLineParser = csvParser;
    _parser = csvParser;
}
// Byte counting must account for multi-byte characters when the stream uses
// a multi-byte encoding (UTF-16 here).
public void ByteCountUsingCharWithMoreThanSingleByteTest()
{
    var encoding = Encoding.Unicode;

    using( var stream = new MemoryStream() )
    using( var writer = new StreamWriter( stream, encoding ) )
    using( var reader = new StreamReader( stream, encoding ) )
    using( var parser = new CsvParser( reader ) )
    {
        // Sample multi-byte characters: 崔钟铉
        parser.Configuration.CountBytes = true;
        parser.Configuration.Encoding = encoding;

        writer.Write( "1,崔\r\n" );
        writer.Write( "3,钟\r\n" );
        writer.Write( "5,铉\r\n" );
        writer.Flush();
        stream.Position = 0;

        // Expected byte positions after each successive Read().
        foreach( var expectedBytePosition in new long[] { 8, 18, 28, 30 } )
        {
            parser.Read();
            Assert.Equal( expectedBytePosition, parser.BytePosition );
        }
    }
}
// Tab-delimited data parses correctly when the delimiter is configured.
public void DifferentDelimiterTest()
{
    using( var stream = new MemoryStream() )
    using( var reader = new StreamReader( stream ) )
    using( var writer = new StreamWriter( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        writer.WriteLine( "1\t2\t3" );
        writer.WriteLine( "4\t5\t6" );
        writer.Flush();
        stream.Position = 0;

        parser.Configuration.Delimiter = "\t";
        parser.Configuration.HasHeaderRecord = false;

        var expectedRows = new[]
        {
            new[] { "1", "2", "3" },
            new[] { "4", "5", "6" }
        };

        foreach( var expected in expectedRows )
        {
            var record = parser.Read();
            Assert.IsNotNull( record );
            Assert.AreEqual( 3, record.Length );
            Assert.AreEqual( expected[0], record[0] );
            Assert.AreEqual( expected[1], record[1] );
            Assert.AreEqual( expected[2], record[2] );
        }

        Assert.IsNull( parser.Read() );
    }
}
// Parses the supplied CSV text into a jagged array of rows and fields.
public static string[][] Parse(string input)
{
    var csvParser = new CsvParser();
    using (var textReader = new StringReader(input))
    {
        return csvParser.Parse(textReader);
    }
}
// Verifies Position (character count) after each Read(). Read() consumes up
// to the first line-end char; the remaining line-end char is consumed by the
// next Read(). Blank lines and comment lines are consumed silently, which is
// why the expected positions jump across them.
public void CharReadTotalTest()
{
    using( var stream = new MemoryStream() )
    using( var writer = new StreamWriter( stream ) )
    using( var reader = new StreamReader( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        parser.Configuration.AllowComments = true;

        // Character layout (1-based):
        //   "Id,Name\r\n"           -> chars 1..9
        //   "1,one\r\n"             -> chars 10..16
        //   ",\r\n"                 -> chars 17..19
        //   "\r\n"                  -> chars 20..21
        //   "# comments\r\n"        -> chars 22..33
        //   "2,two\r\n"             -> chars 34..40
        //   "3,\"three, four\"\r\n" -> chars 41..57
        writer.WriteLine( "Id,Name" );
        writer.WriteLine( "1,one" );
        writer.WriteLine( "," );
        writer.WriteLine( "" );
        writer.WriteLine( "# comments" );
        writer.WriteLine( "2,two" );
        writer.WriteLine( "3,\"three, four\"" );
        writer.Flush();
        stream.Position = 0;

        foreach( var expectedPosition in new long[] { 8, 15, 18, 39, 56, 57 } )
        {
            parser.Read();
            Assert.Equal( expectedPosition, parser.Position );
        }
    }
}
// Smoke test: enumerating the sample data must complete without throwing.
public void Test()
{
    var reader = new StringReader(CsvReaderSampleData.SampleData1);
    var parser = new CsvParser(reader, new CsvLayout(), new CsvBehaviour());

    foreach (var parsedLine in parser)
    {
        // Intentionally empty: we only care that enumeration succeeds.
    }
}
// The configuration instance passed to the constructor must be the exact
// instance exposed via parser.Configuration.
public void EnsureInternalsAreSetupWhenPassingReaderAndConfigTest()
{
    using( var stream = new MemoryStream() )
    using( var reader = new StreamReader( stream ) )
    {
        var config = new CsvConfiguration();
        using( var parser = new CsvParser( reader, config ) )
        {
            Assert.Same( config, parser.Configuration );
        }
    }
}
public IEnumerable<CheckListDto> GetCkecklist() { Assembly assembly = Assembly.GetExecutingAssembly(); string resourceName = string.Concat(assembly.GetName().Name, '.', FILE_NAME); Stream stream = assembly.GetManifestResourceStream(resourceName); CsvParser<CheckListDto> parser = new CsvParser<CheckListDto>(stream); this.list = parser.Parse().ToList(); return list; }
// A CsvReader constructed over a parser must expose that same parser and
// share its configuration instance.
public void EnsureInternalsAreSetupCorrectlyWhenPassingParserTest()
{
    using( var stream = new MemoryStream() )
    using( var reader = new StreamReader( stream ) )
    {
        var csvParser = new CsvParser( reader );
        using( var csv = new CsvReader( csvParser ) )
        {
            Assert.Same( csv.Configuration, csv.Parser.Configuration );
            Assert.Same( csvParser, csv.Parser );
        }
    }
}
// A custom row delimiter ("|" — single-char, unlike the default) should split
// records, while quoted "|" and literal newlines remain field data.
public void TestNonDefaultRowDelimiterReading()
{
    var settings = new CsvSettings()
    {
        RowDelimiter = "|"
    };

    const String csv = "1,2,3|4,\"| as data\",\n\r as data|7";

    using (var parser = new CsvParser(csv, settings))
    {
        String[][] rows = parser.ReadToEnd();

        Assert.AreEqual(3, rows.Length, "Incorrect number of rows");
        CollectionAssert.AreEqual(rows[0], new string[] { "1", "2", "3" });
        CollectionAssert.AreEqual(rows[1], new string[] { "4", "| as data", "\n\r as data" });
        CollectionAssert.AreEqual(rows[2], new string[] { "7" });
    }
}
// Entry point: parses the game data, runs gradient descent over the games,
// prints the mean squared error and then the ranked player qualities.
static void Main(string[] args)
{
    CsvParser parser = new CsvParser();
    parser.Go();

    // Iteratively adjust player qualities against the observed results.
    for (int i = 0; i < 5000000; i++)
    {
        GradDown(parser.Games);
    }

    PrintAverageError(parser);

    // Print players with a non-zero quality, best first.
    foreach (Player p in parser.Players.Where(x => x.Quality != 0).OrderByDescending(x => x.Quality))
    {
        Console.WriteLine("{0, 11} {1, 5}", p.Name, p.Quality.ToString("0.00"));
    }

    Console.ReadLine();
}
// Verifies that a custom (tab) field delimiter splits fields while quoted
// content and commas remain field data.
// NOTE(review): the verbatim string below embeds literal whitespace (tabs /
// line breaks) whose exact bytes are significant to the assertions — do not
// reformat this line.
public void TestNonDefaultFieldDelimiterReading() { var settings = new CsvSettings() { FieldDelimiter = '\t' }; const String csv = @"1 2 3 4 "" as data"" , as data 7"; using (var parser = new CsvParser(csv, settings)) { String[][] rows = parser.ReadToEnd(); Assert.AreEqual(3, rows.Length, "Incorrect number of rows"); CollectionAssert.AreEqual(rows[0], new string[] { "1", "2", "3" }); CollectionAssert.AreEqual(rows[1], new string[] { "4", " as data", ", as data" }); CollectionAssert.AreEqual(rows[2], new string[] { "7" }); } }
// In Excel leading-zeros mode, a space before '=' makes the field invalid
// for the special format, so the raw text is returned unchanged.
public void ParseInvalidSpace1FieldSpaceBeforeEqualsTest()
{
    using( var stream = new MemoryStream() )
    using( var writer = new StreamWriter( stream ) )
    using( var reader = new StreamReader( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        writer.Write( " =\"01\"\r\n" );
        writer.Flush();
        stream.Position = 0;

        parser.Configuration.UseExcelLeadingZerosFormatForNumerics = true;

        var record = parser.Read();
        Assert.AreEqual( " =\"01\"", record[0] );
    }
}
// Reading past the end of the data must be safe: every Read() after the last
// record simply returns null instead of throwing.
public void CallingReadMultipleTimesAfterDoneReadingTest()
{
    // The original test leaked all four disposables; wrap them in using
    // blocks (reader/parser nested inside the writer's scope so the stream
    // stays open while they read).
    using( var stream = new MemoryStream() )
    using( var writer = new StreamWriter( stream ) )
    {
        writer.WriteLine( "one,two,three" );
        writer.WriteLine( "four,five,six" );
        writer.Flush();
        stream.Position = 0;

        using( var reader = new StreamReader( stream ) )
        using( var parser = new CsvParser( reader ) )
        {
            // Two real records plus two extra reads past the end.
            parser.Read();
            parser.Read();
            parser.Read();
            parser.Read();
        }
    }
}
// An Excel leading-zeros field (="01") with no trailing line ending should
// still be unwrapped to its inner value.
public void ParseValid1FieldNoLineEndingTest()
{
    using( var stream = new MemoryStream() )
    using( var writer = new StreamWriter( stream ) )
    using( var reader = new StreamReader( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        writer.Write( "=\"01\"" );
        writer.Flush();
        stream.Position = 0;

        parser.Configuration.UseExcelLeadingZerosFormatForNumerics = true;
        parser.Configuration.HasHeaderRecord = false;

        var record = parser.Read();
        Assert.AreEqual( "01", record[0] );
    }
}
// A single record with no trailing CRLF still parses into all its fields.
public void Parse1RecordWithNoCrlfTest()
{
    using( var memoryStream = new MemoryStream() )
    using( var streamReader = new StreamReader( memoryStream ) )
    using( var streamWriter = new StreamWriter( memoryStream ) )
    using( var parser = new CsvParser( streamReader ) )
    {
        streamWriter.Write( "one,two,three" );
        streamWriter.Flush();
        memoryStream.Position = 0;

        var row = parser.Read();

        Assert.IsNotNull( row );
        Assert.AreEqual( 3, row.Length );
        Assert.AreEqual( "one", row[0] );
        Assert.AreEqual( "two", row[1] );
        Assert.AreEqual( "three", row[2] );
    }
}
// Computes and prints the mean squared error of the quality-based
// goal-difference prediction over all parsed games.
private static void PrintAverageError(CsvParser parser)
{
    double sumSquaredError = 0d;

    foreach (Game game in parser.Games)
    {
        // Predicted goal difference: team A quality minus team B quality.
        // Kept as separate += / -= so floating-point rounding matches the
        // original accumulation order exactly.
        double predicted = 0d;
        for (int slot = 0; slot < 5; slot++)
        {
            predicted += game.TA[slot].Quality;
            predicted -= game.TB[slot].Quality;
        }

        double error = game.GoalDiff - predicted;
        sumSquaredError += error * error;
    }

    Console.WriteLine("MSE is: {0:0.000}", sumSquaredError / (double)parser.Games.Length);
}
// A partial match of a multi-char delimiter ("&" inside "2&3") must be kept
// as field data; only the full "&|$" sequence splits fields.
public void MultipleCharDelimiterWithPartOfDelimiterInFieldTest()
{
    using( var stream = new MemoryStream() )
    using( var reader = new StreamReader( stream ) )
    using( var writer = new StreamWriter( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        writer.Write( "1&|$2&3&|$4\r\n" );
        writer.Flush();
        stream.Position = 0;

        parser.Configuration.Delimiter = "&|$";

        var record = parser.Read();

        Assert.AreEqual( 3, record.Length );
        Assert.AreEqual( "1", record[0] );
        Assert.AreEqual( "2&3", record[1] );
        Assert.AreEqual( "4", record[2] );
    }
}
// Two written records should yield exactly two successful Read() calls.
public void ParseNewRecordTest()
{
    // The original test leaked the stream, writer, reader and parser; wrap
    // them all in using blocks.
    using( var stream = new MemoryStream() )
    using( var writer = new StreamWriter( stream ) )
    {
        writer.WriteLine( "one,two,three" );
        writer.WriteLine( "four,five,six" );
        writer.Flush();
        stream.Position = 0;

        using( var reader = new StreamReader( stream ) )
        using( var parser = new CsvParser( reader ) )
        {
            var count = 0;
            while( parser.Read() != null )
            {
                count++;
            }

            Assert.AreEqual( 2, count );
        }
    }
}
// Two CRLF-terminated records read back as two non-null rows, then null.
public void SimpleParseTest()
{
    using( var stream = new MemoryStream() )
    using( var writer = new StreamWriter( stream ) )
    using( var reader = new StreamReader( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        writer.Write( "1,2\r\n" );
        writer.Write( "3,4\r\n" );
        writer.Flush();
        stream.Position = 0;

        Assert.IsNotNull( parser.Read() );
        Assert.IsNotNull( parser.Read() );
        Assert.IsNull( parser.Read() );
    }
}
// Benchmarks a full parse of FileName with byte counting enabled and prints
// the elapsed wall-clock time.
private static void ParseCountingBytesTest()
{
    using( var stream = File.OpenRead( FileName ) )
    using( var reader = new StreamReader( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        parser.Configuration.CountBytes = true;

        var stopwatch = Stopwatch.StartNew();

        // Drain the whole file.
        while( parser.Read() != null )
        {
        }

        stopwatch.Stop();
        Console.WriteLine( "Time: {0}", stopwatch.Elapsed );
    }
}
// Quoted fields are returned without their surrounding quotes.
public void ParseEscapedFieldsTest()
{
    using( var stream = new MemoryStream() )
    using( var writer = new StreamWriter( stream ) )
    using( var reader = new StreamReader( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        // Input: "one","two","three"
        writer.WriteLine( "\"one\",\"two\",\"three\"" );
        writer.Flush();
        stream.Position = 0;

        var row = parser.Read();

        Assert.IsNotNull( row );
        Assert.AreEqual( 3, row.Length );
        Assert.AreEqual( "one", row[0] );
        Assert.AreEqual( "two", row[1] );
        Assert.AreEqual( "three", row[2] );
    }
}
// A doubled quote inside a quoted field unescapes to a single quote.
public void ParseEscapedFieldWithEscapedQuoteTest()
{
    using( var stream = new MemoryStream() )
    using( var writer = new StreamWriter( stream ) )
    using( var reader = new StreamReader( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        // Input: 1,"two "" 2",3
        writer.WriteLine( "1,\"two \"\" 2\",3" );
        writer.Flush();
        stream.Position = 0;

        var row = parser.Read();

        Assert.IsNotNull( row );
        Assert.AreEqual( 3, row.Length );
        Assert.AreEqual( "1", row[0] );
        Assert.AreEqual( "two \" 2", row[1] );
        Assert.AreEqual( "3", row[2] );
    }
}
// Byte counting stays accurate when records contain quoted fields.
public void ByteCountTestWithQuotedFields()
{
    using( var stream = new MemoryStream() )
    using( var writer = new StreamWriter( stream ) )
    using( var reader = new StreamReader( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        parser.Configuration.CountBytes = true;

        writer.Write( "1,\"2\"\r\n" );
        writer.Write( "\"3\",4\r\n" );
        writer.Flush();
        stream.Position = 0;

        parser.Read();
        Assert.AreEqual( 7, parser.BytePosition );

        parser.Read();
        Assert.AreEqual( 14, parser.BytePosition );

        Assert.IsNull( parser.Read() );
    }
}
// RawRecord includes the LF line terminator, and becomes null after the
// final record has been read.
public void RawRecordLfTest()
{
    using( var stream = new MemoryStream() )
    using( var writer = new StreamWriter( stream ) )
    using( var reader = new StreamReader( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        writer.Write( "1,2\n" );
        writer.Write( "3,4\n" );
        writer.Flush();
        stream.Position = 0;

        parser.Read();
        Assert.AreEqual( "1,2\n", parser.RawRecord );

        parser.Read();
        Assert.AreEqual( "3,4\n", parser.RawRecord );

        parser.Read();
        Assert.AreEqual( null, parser.RawRecord );
    }
}
// Basic byte counting: each "x,y\r\n" record is 5 bytes.
public void ByteCountTest()
{
    using( var stream = new MemoryStream() )
    using( var writer = new StreamWriter( stream ) )
    using( var reader = new StreamReader( stream ) )
    using( var parser = new CsvParser( reader ) )
    {
        parser.Configuration.CountBytes = true;

        writer.Write( "1,2\r\n" );
        writer.Write( "3,4\r\n" );
        writer.Flush();
        stream.Position = 0;

        parser.Read();
        Assert.AreEqual( 5, parser.BytePosition );

        parser.Read();
        Assert.AreEqual( 10, parser.BytePosition );

        Assert.IsNull( parser.Read() );
    }
}
// A space before the opening quote makes the field unquoted, so the quotes
// are kept verbatim; BadDataFound is disabled so no exception is raised.
public void ParseEscapedFieldWithSpaceBeforeTest()
{
    using (var stream = new MemoryStream())
    using (var writer = new StreamWriter(stream))
    using (var reader = new StreamReader(stream))
    using (var parser = new CsvParser(reader))
    {
        parser.Configuration.Delimiter = ",";

        // Input: one, "two",three
        writer.WriteLine("one, \"two\",three");
        writer.Flush();
        stream.Position = 0;

        parser.Configuration.BadDataFound = null;

        var row = parser.Read();

        Assert.IsNotNull(row);
        Assert.AreEqual(3, row.Length);
        Assert.AreEqual("one", row[0]);
        Assert.AreEqual(" \"two\"", row[1]);
        Assert.AreEqual("three", row[2]);
    }
}
// With a 1-char buffer, fields like "=01" (no quotes) are invalid for the
// Excel leading-zeros format and are returned verbatim.
public void ParseInvalid2FieldBufferOf1Test()
{
    var config = new CsvConfiguration
    {
        BufferSize = 1
    };

    using (var stream = new MemoryStream())
    using (var writer = new StreamWriter(stream))
    using (var reader = new StreamReader(stream))
    using (var parser = new CsvParser(reader, config))
    {
        writer.Write("=01,=02\r\n");
        writer.Flush();
        stream.Position = 0;

        parser.Configuration.UseExcelLeadingZerosFormatForNumerics = true;
        parser.Configuration.HasHeaderRecord = false;

        var record = parser.Read();

        Assert.AreEqual("=01", record[0]);
        Assert.AreEqual("=02", record[1]);
    }
}
// Measures read throughput of the weather data file across every
// combination of order preservation and degree of parallelism.
// NOTE(review): depends on a hard-coded local path — run only on the
// author's machine.
public void LocalWeatherReadTest()
{
    bool[] keepOrderValues = new bool[] { true, false };
    int[] parallelismValues = new[] { 4, 3, 2, 1 };

    foreach (var keepOrder in keepOrderValues)
    {
        foreach (var parallelism in parallelismValues)
        {
            CsvParserOptions csvParserOptions = new CsvParserOptions(true, new[] { ',' }, parallelism, keepOrder);
            LocalWeatherDataMapper csvMapper = new LocalWeatherDataMapper();
            CsvParser<LocalWeatherData> csvParser = new CsvParser<LocalWeatherData>(csvParserOptions, csvMapper);

            MeasurementUtils.MeasureElapsedTime(
                string.Format("LocalWeather (DegreeOfParallelism = {0}, KeepOrder = {1})", parallelism, keepOrder),
                () =>
                {
                    var a = csvParser
                        .ReadFromFile(@"C:\Users\philipp\Downloads\csv\201503hourly.txt", Encoding.ASCII)
                        .ToList();
                });
        }
    }
}
// Text after a closing quote is merged into the field (quote kept verbatim).
public void ParseEscapedFieldWithQuoteAfterTest()
{
    using (var stream = new MemoryStream())
    using (var writer = new StreamWriter(stream))
    using (var reader = new StreamReader(stream))
    using (var parser = new CsvParser(reader))
    {
        // Input: 1,"two" "2,3
        writer.WriteLine("1,\"two\" \"2,3");
        writer.Flush();
        stream.Position = 0;

        var row = parser.Read();

        Assert.IsNotNull(row);
        Assert.AreEqual(3, row.Length);
        Assert.AreEqual("1", row[0]);
        Assert.AreEqual("two \"2", row[1]);
        Assert.AreEqual("3", row[2]);

        Assert.IsNull(parser.Read());
    }
}
// Two Excel leading-zeros rows — the second with no trailing line ending —
// both unwrap to their inner values.
public void ParseValid1Field2RowsNoLineEndingTest()
{
    using (var stream = new MemoryStream())
    using (var writer = new StreamWriter(stream))
    using (var reader = new StreamReader(stream))
    using (var parser = new CsvParser(reader))
    {
        writer.Write("=\"01\"\r\n");
        writer.Write("=\"02\"");
        writer.Flush();
        stream.Position = 0;

        parser.Configuration.UseExcelLeadingZerosFormatForNumerics = true;
        parser.Configuration.HasHeaderRecord = false;

        var firstRow = parser.Read();
        Assert.AreEqual("01", firstRow[0]);

        var secondRow = parser.Read();
        Assert.AreEqual("02", secondRow[0]);
    }
}
// Parses this._input as CSV (using the configured column separators, or ","
// when none are set) and returns the values as a rectangular 2D array sized
// by row count x widest row. Cells missing from shorter rows stay null.
public string[,] Detect()
{
    string delimiter = string.IsNullOrEmpty(this._format.ColumnSeparators) ? "," : this._format.ColumnSeparators;

    // Removed the commented-out set_* calls and the redundant
    // configuration1/configuration alias from the original.
    CsvConfiguration configuration = new CsvConfiguration();
    configuration.Delimiter = delimiter;
    configuration.ThrowOnBadData = false;
    // Swallow bad-data notifications: detection is best-effort.
    configuration.BadDataCallback = delegate(string readerContext) { };

    List<string[]> rows = new List<string[]>();
    int maxColumns = 0;
    using (CsvParser parser = new CsvParser(new StringReader(this._input), configuration))
    {
        string[] row = null;
        while ((row = parser.Read()) != null)
        {
            maxColumns = Math.Max(row.Length, maxColumns);
            rows.Add(row);
        }
    }

    string[,] result = new string[rows.Count, maxColumns];
    foreach (var cell in rows.SelectMany((r, i) => r.Select((c, j) => new { Value = c, Ridx = i, Cidx = j })))
    {
        result[cell.Ridx, cell.Cidx] = cell.Value;
    }

    return result;
}
// Loads call records from a CSV file (the file and delimiter depend on
// GlobalSettings). Rows whose first column fails to parse are dropped with
// an error log; rows with errors in other columns are kept with a trace log.
public Task<CallsInfo[]> LoadDataAsync()
{
    var fileName = GlobalSettings.CompareMode
        ? @"C:\Users\a.poturaev\Desktop\hackaton\rasmetka_2.csv"
        : @"C:\Users\a.poturaev\Desktop\hackaton\calls_test.csv";

    _logger.LogInformation("Загрузка звонков из csv");

    var csvParserOptions = GlobalSettings.UseCommaDelimeter
        ? new CsvParserOptions(true, ',')
        : new CsvParserOptions(true, '|');
    var csvMapper = new CsvUserDetailsMapping();
    var csvParser = new CsvParser<CallsInfo>(csvParserOptions, csvMapper);

    var result = csvParser.ReadFromFile(fileName, Encoding.UTF8);

    var info = result
        .Select(x =>
        {
            if (x.IsValid)
            {
                return x.Result;
            }

            if (x.Error.ColumnIndex == 0)
            {
                // First column broken: the row is unusable.
                _logger.LogError($"Parsing error! rowIndex {x.RowIndex}, columnIndex {x.Error.ColumnIndex}, error {x.Error.Value}");
                return null;
            }

            // Error in a later column: keep the partially mapped row.
            _logger.LogTrace($"Parsing error! rowIndex {x.RowIndex}, columnIndex {x.Error.ColumnIndex}, error {x.Error.Value}");
            return x.Result;
        })
        .Where(x => x != null)
        .ToArray();

    _logger.LogInformation("Закончили");

    return Task.FromResult(info);
}
// Reads the Google issue export CSV and maps each row to a GoogleIssue.
// Throws InvalidOperationException when the input file is not readable.
public IEnumerable<GoogleIssue> GetIssues()
{
    if (!IsInputDataFileValid())
    {
        // Fixed typo in the original message ("inoput" -> "input").
        var message = string.Format("Cannot access input file [{0}]. Ensure that the file exists and can be opened for READ access.", _inputDataFile);
        Logger.Error(message);
        throw new InvalidOperationException(message);
    }

    var issues = new List<GoogleIssue>();

    // Dispose the parser (the original leaked it).
    using (var csv = new CsvParser(new StringReader(File.ReadAllText(InputDataFile))))
    {
        string[] elements;
        while ((elements = csv.Read()) != null)
        {
            issues.Add(
                new GoogleIssue()
            {
                Id = Convert.ToInt16(elements[0]),
                IssueType = elements[1],
                Status = elements[2],
                Priority = elements[3],
                Milestone = elements[4],
                Owner = elements[5],
                Summary = elements[6],
                Labels = SplitStringIntoSeparateElements(elements[7]).ToList()
            });
        }
    }

    return issues;
}
// BadDataFound should receive the raw record text for malformed rows.
// Note the second bad row also drags the following record into RawRecord.
public void CallbackTest()
{
    string badRawRecord = null;
    var config = new CsvConfiguration(CultureInfo.InvariantCulture)
    {
        BadDataFound = context => badRawRecord = context.Parser.RawRecord.ToString(),
    };

    using (var stream = new MemoryStream())
    using (var reader = new StreamReader(stream))
    using (var writer = new StreamWriter(stream))
    using (var parser = new CsvParser(reader, config))
    {
        writer.Write(" a\"bc\",d\r\n");       //  a"bc",d\r\n
        writer.Write("\"e\"\"f\"g \" ,h\r\n"); // "e""f"g " ,h\r\n
        writer.Write("\"i\"\"j\",k\r\n");      // "i""j",k\r\n
        writer.Flush();
        stream.Position = 0;

        // Row 1: quote inside unquoted field -> callback fires.
        parser.Read();
        var record = parser.Record;
        Assert.IsNotNull(badRawRecord);
        Assert.AreEqual(" a\"bc\",d\r\n", badRawRecord);

        // Row 2: trailing text after closing quote -> callback fires with
        // both this record and the next in the raw text.
        badRawRecord = null;
        parser.Read();
        record = parser.Record;
        Assert.IsNotNull(badRawRecord);
        Assert.AreEqual("\"e\"\"f\"g \" ,h\r\n\"i\"\"j\",k\r\n", badRawRecord);

        // Row 3: valid -> callback stays silent.
        badRawRecord = null;
        parser.Read();
        record = parser.Record;
        Assert.IsNull(badRawRecord);
    }
}
// Downloads the names sheet as CSV and fills t.ActorNames: one entry per
// data row, keyed by the "Keys" column, with every other column stored as a
// (tag, content) localized text pair.
static void DownloadNamesSheetPage(AdvLocalizeContent t)
{
    DownloadManager.GoogleGetCSV((Result) =>
    {
        CsvParser csvParser = new CsvParser();
        string[][] csvTable = csvParser.Parse(Result);

        // Locate the column whose header cell is "Keys".
        int id_key = Array.IndexOf(csvTable[0], "Keys");

        t.ActorNames = new List<LocalizeActorName>();

        // Row 0 is the header; data starts at row 1.
        for (int row = 1; row < csvTable.Length; row++)
        {
            LocalizeActorName actor = new LocalizeActorName()
            {
                key = csvTable[row][id_key]
            };

            List<LocalizeText> texts = new List<LocalizeText>();
            for (int col = 0; col < csvTable[row].Length; col++)
            {
                if (col == id_key)
                {
                    continue;
                }

                texts.Add(new LocalizeText()
                {
                    tag = csvTable[0][col],
                    content = csvTable[row][col]
                });
            }

            actor.names = texts;
            t.ActorNames.Add(actor);
        }
    }, t.webServices, t.namesSheetPage.sheet_id, t.namesSheetPage.page_gid);
}
// One-time loader (guarded by m_dicPartsTypeCamera_ext_flag) that reads
// "edit_attention_point_define_creator.nei" from the mod file system and
// fills SceneEditInfo.m_dicPartsTypeCamera_ with one CamToBone entry per
// non-empty row: columns are (MPN key, bone name, angle vector2, distance),
// consumed left-to-right via the num++ cursor.
// NOTE(review): the empty catch swallows Enum.Parse failures, leaving key as
// MPN.null_mpn while still reading the remaining columns — presumably a
// deliberate best-effort fallback; confirm before changing.
public static void m_dicPartsTypeCamera_ext() { if (m_dicPartsTypeCamera_ext_flag) { return; } m_dicPartsTypeCamera_ext_flag = true; string text = "edit_attention_point_define_creator.nei"; using (AFileBase afileBase = GameUty.FileSystemMod.FileOpen(text)) { using (CsvParser csvParser = new CsvParser()) { bool condition = csvParser.Open(afileBase); NDebug.Assert(condition, text + "\nopen failed."); for (int i = 1; i < csvParser.max_cell_y; i++) { if (csvParser.IsCellToExistData(0, i)) { SceneEditInfo.CamToBone value = default(SceneEditInfo.CamToBone); int num = 0; MPN key = MPN.null_mpn; try { key = (MPN)Enum.Parse(typeof(MPN), csvParser.GetCellAsString(num++, i)); } catch {} value.bone = csvParser.GetCellAsString(num++, i); value.angle = wf.Parse.Vector2(csvParser.GetCellAsString(num++, i)); value.distance = csvParser.GetCellAsReal(num++, i); SceneEditInfo.m_dicPartsTypeCamera_[key] = value; } } } } }
// Learns a model-selecting ensemble on an indexed subset of the Glass data
// and pins the resulting log-loss on the full observation set.
public void ClassificationModelSelectingEnsembleLearner_Learn_Indexed()
{
    // Candidate trees of varying maximum depth.
    var learners = new IIndexedLearner<ProbabilityPrediction>[]
    {
        new ClassificationDecisionTreeLearner(2),
        new ClassificationDecisionTreeLearner(5),
        new ClassificationDecisionTreeLearner(7),
        new ClassificationDecisionTreeLearner(9),
        new ClassificationDecisionTreeLearner(11),
        new ClassificationDecisionTreeLearner(21),
        new ClassificationDecisionTreeLearner(23),
        new ClassificationDecisionTreeLearner(1),
        new ClassificationDecisionTreeLearner(14),
        new ClassificationDecisionTreeLearner(17),
        new ClassificationDecisionTreeLearner(19),
        new ClassificationDecisionTreeLearner(33)
    };

    var metric = new LogLossClassificationProbabilityMetric();
    var ensembleStrategy = new MeanProbabilityClassificationEnsembleStrategy();
    var ensembleSelection = new ForwardSearchClassificationEnsembleSelection(
        metric, ensembleStrategy, 5, 1, true);

    var sut = new ClassificationModelSelectingEnsembleLearner(
        learners,
        new RandomCrossValidation<ProbabilityPrediction>(5, 23),
        ensembleStrategy,
        ensembleSelection);

    var parser = new CsvParser(() => new StringReader(Resources.Glass));
    var observations = parser.EnumerateRows(v => v != "Target").ToF64Matrix();
    var targets = parser.EnumerateRows("Target").ToF64Vector();

    // Train on the first 25 rows only.
    var indices = Enumerable.Range(0, 25).ToArray();
    var model = sut.Learn(observations, targets, indices);

    var predictions = model.PredictProbability(observations);
    var actual = metric.Error(targets, predictions);

    Assert.AreEqual(2.3682546920482164, actual, 0.0001);
}
// Loads a CSV file into a list of rows (each a list of field strings),
// optionally dropping the first (header) row. Returns null when parsing
// fails mid-read; a missing/unreadable file still throws from the
// StreamReader constructor, matching the original behavior.
public static List<List<string>> LoadCSVFile(string fileName, bool firstRowHasHeaders)
{
    // https://github.com/JoshClose/CsvHelper
    var csvData = new List<List<string>>();

    // using guarantees the reader and parser are disposed even when the read
    // loop throws; the original leaked both on the error path.
    using (var textReader = new StreamReader(fileName))
    using (var parser = new CsvParser(textReader))
    {
        try
        {
            bool dropFirstRow = firstRowHasHeaders;
            string[] csvDataRow;
            while ((csvDataRow = parser.Read()) != null)
            {
                if (!dropFirstRow)
                {
                    csvData.Add(csvDataRow.ToList());
                }
                dropFirstRow = false;
            }
        }
        catch (Exception)
        {
            // Preserve the original contract: callers receive null on a
            // parsing failure.
            return null;
        }
    }

    return csvData;
}
// Trains a stacking ensemble on the aptitude data and pins the variable
// importances of the meta-model (one entry per base model).
public void RegressionStackingEnsembleModel_GetVariableImportance()
{
    var parser = new CsvParser(() => new StringReader(Resources.AptitudeData));
    var observations = parser.EnumerateRows(v => v != "Pass").ToF64Matrix();
    var targets = parser.EnumerateRows("Pass").ToF64Vector();

    var featureNameToIndex = new Dictionary<string, int>
    {
        { "AptitudeTestScore", 0 },
        { "PreviousExperience_month", 1 }
    };

    var learners = new IIndexedLearner<double>[]
    {
        new RegressionDecisionTreeLearner(2),
        new RegressionDecisionTreeLearner(5),
        new RegressionDecisionTreeLearner(7),
        new RegressionDecisionTreeLearner(9)
    };

    var learner = new RegressionStackingEnsembleLearner(
        learners,
        new RegressionDecisionTreeLearner(9),
        new RandomCrossValidation<double>(5, 23),
        false);

    var sut = learner.Learn(observations, targets);
    var actual = sut.GetVariableImportance(featureNameToIndex);

    var expected = new Dictionary<string, double>
    {
        { "RegressionDecisionTreeModel_2", 100 },
        { "RegressionDecisionTreeModel_1", 69.7214491857349 },
        { "RegressionDecisionTreeModel_0", 33.8678328474247 },
        { "RegressionDecisionTreeModel_3", 1.70068027210884 }
    };

    Assert.AreEqual(expected.Count, actual.Count);

    // Compare entries pairwise in order.
    var zipped = expected.Zip(actual, (e, a) => new { Expected = e, Actual = a });
    foreach (var pair in zipped)
    {
        Assert.AreEqual(pair.Expected.Key, pair.Actual.Key);
        Assert.AreEqual(pair.Expected.Value, pair.Actual.Value, 0.000001);
    }
}
// After the parser exhausts the stream, appending more data and rewinding
// to the append position lets Read() pick up the new records.
public void RefillTextReaderMultipleTimesTest()
{
    using (var stream = new MemoryStream())
    using (var reader = new StreamReader(stream))
    using (var writer = new StreamWriter(stream))
    using (var parser = new CsvParser(reader, CultureInfo.InvariantCulture))
    {
        writer.Write("1,2\r\n");
        writer.Flush();
        stream.Position = 0;

        Assert.True(parser.Read());
        Assert.Equal("1", parser[0]);
        Assert.Equal("2", parser[1]);
        Assert.False(parser.Read());

        // First refill.
        var resumeAt = stream.Position;
        writer.Write("3,4\r\n");
        writer.Flush();
        stream.Position = resumeAt;

        Assert.True(parser.Read());
        Assert.Equal("3", parser[0]);
        Assert.Equal("4", parser[1]);
        Assert.False(parser.Read());

        // Second refill.
        resumeAt = stream.Position;
        writer.Write("5,6\r\n");
        writer.Flush();
        stream.Position = resumeAt;

        Assert.True(parser.Read());
        Assert.Equal("5", parser[0]);
        Assert.Equal("6", parser[1]);
        Assert.False(parser.Read());
    }
}
// Round-trips two person records through a real temp file, verifying header
// skipping, whitespace trimming and date parsing.
public void ReadFromFileTest()
{
    CsvParserOptions csvParserOptions = new CsvParserOptions(true, new[] { ';' }, 1, true);
    CsvPersonMapping csvMapper = new CsvPersonMapping();
    CsvParser<Person> csvParser = new CsvParser<Person>(csvParserOptions, csvMapper);

    var stringBuilder = new StringBuilder()
        .AppendLine("FirstName;LastName;BirthDate")
        .AppendLine(" Philipp;Wagner;1986/05/12 ")
        .AppendLine("Max;Mustermann;2014/01/01");

    var basePath = AppDomain.CurrentDomain.BaseDirectory;
    var filePath = Path.Combine(basePath, "test_file.txt");
    File.WriteAllText(filePath, stringBuilder.ToString(), Encoding.UTF8);

    try
    {
        var result = csvParser
            .ReadFromFile(filePath.ToString(), Encoding.UTF8)
            .ToList();

        Assert.AreEqual(2, result.Count);
        Assert.IsTrue(result.All(x => x.IsValid));

        Assert.AreEqual("Philipp", result[0].Result.FirstName);
        Assert.AreEqual("Wagner", result[0].Result.LastName);
        Assert.AreEqual(1986, result[0].Result.BirthDate.Year);
        Assert.AreEqual(5, result[0].Result.BirthDate.Month);
        Assert.AreEqual(12, result[0].Result.BirthDate.Day);

        Assert.AreEqual("Max", result[1].Result.FirstName);
        Assert.AreEqual("Mustermann", result[1].Result.LastName);
        Assert.AreEqual(2014, result[1].Result.BirthDate.Year);
        Assert.AreEqual(1, result[1].Result.BirthDate.Month);
        Assert.AreEqual(1, result[1].Result.BirthDate.Day);
    }
    finally
    {
        // Clean up the temp file (the original left it behind) so repeated
        // runs start from a known state.
        File.Delete(filePath);
    }
}
// Initializes the first chunk position and selects the level to load:
// the GameManager's chosen level, a random one, or a fallback test level.
void Start()
{
    nextChunkPosition = new Vector3(0.0f, 0.0f, 5.0f);

    GameManager gm = FindObjectOfType<GameManager>();
    if (gm == null || gm.LevelToLoad == "")
    {
        // No usable GameManager selection: fall back to the test level.
        Debug.LogWarning("Level from GM not found, loading test level");
        currentLevel = CsvParser.DeseriealizeLevel("LevelTest");
        return;
    }

    if (gm.LevelToLoad == "*RANDOM*")
    {
        LevelGenerator lg = new LevelGenerator(
            trackChunkPrefabs.Count,
            items.Count,
            enemies.Count,
            chanceToSpawnItem,
            chanceToSpawnEnemy);
        currentLevel = lg.Generate(100);
    }
    else
    {
        currentLevel = CsvParser.DeseriealizeLevel(gm.LevelToLoad);
    }
}
// Benchmarks parsing of one million identical lines at several degrees of
// parallelism, printing the elapsed time for each run.
public void DegreeOfParallelismTest()
{
    const int csvDataLines = 1000000;
    int[] parallelismValues = new[] { 1, 2, 4 };

    // Build the synthetic CSV payload once, reused for every run.
    StringBuilder stringBuilder = new StringBuilder();
    for (int line = 0; line < csvDataLines; line++)
    {
        stringBuilder.AppendLine("Philipp;Wagner;1986/05/12");
    }
    var csvData = stringBuilder.ToString();

    foreach (var parallelism in parallelismValues)
    {
        CsvParserOptions csvParserOptions = new CsvParserOptions(true, new[] { ';' }, parallelism, true);
        CsvReaderOptions csvReaderOptions = new CsvReaderOptions(new[] { Environment.NewLine });
        CsvPersonMapping csvMapper = new CsvPersonMapping();
        CsvParser<Person> csvParser = new CsvParser<Person>(csvParserOptions, csvMapper);

        MeasurementUtils.MeasureElapsedTime(
            string.Format("DegreeOfParallelismTest (DegreeOfParallelism = {0})", parallelism),
            () => csvParser.ReadFromString(csvReaderOptions, csvData).ToList());
    }
}
// A partial match of a multi-char delimiter ("<" inside "2<3") must be kept
// as field data; only the full "<|>" sequence splits fields.
public void MultipleCharDelimiterWithPartOfDelimiterInFieldTest()
{
    var config = new CsvConfiguration(CultureInfo.InvariantCulture)
    {
        Delimiter = "<|>",
    };

    using (var stream = new MemoryStream())
    using (var reader = new StreamReader(stream))
    using (var writer = new StreamWriter(stream))
    using (var parser = new CsvParser(reader, config))
    {
        writer.Write("1<|>2<3<|>4\r\n");
        writer.Flush();
        stream.Position = 0;

        parser.Read();

        Assert.Equal(3, parser.Count);
        Assert.Equal("1", parser[0]);
        Assert.Equal("2<3", parser[1]);
        Assert.Equal("4", parser[2]);
    }
}
// Byte counting stays accurate with a three-character delimiter: each
// "x;;;y\r\n" record is 7 bytes.
public void DifferentDelimiter3ByteCountTest()
{
    using (var stream = new MemoryStream())
    using (var reader = new StreamReader(stream))
    using (var writer = new StreamWriter(stream))
    using (var parser = new CsvParser(reader))
    {
        writer.Write("1;;;2\r\n");
        writer.Write("4;;;5\r\n");
        writer.Flush();
        stream.Position = 0;

        parser.Configuration.Delimiter = ";;;";
        parser.Configuration.CountBytes = true;

        parser.Read();
        Assert.AreEqual(7, parser.BytePosition);

        parser.Read();
        Assert.AreEqual(14, parser.BytePosition);

        Assert.IsNull(parser.Read());
    }
}
/// <summary>
/// Loads the CSV file <paramref name="name_file"/> (appended to the <c>Path</c>
/// property) and parses it with the supplied mapping, returning the mapped results.
/// </summary>
/// <param name="csvMapper">Mapping from CSV columns to <typeparamref name="T"/>.</param>
/// <param name="name_file">File name relative to <c>Path</c>.</param>
/// <returns>The parsed records; never empty.</returns>
/// <exception cref="ApplicationException">
/// Thrown when the file is missing or yields no records.
/// </exception>
public IList <T> CsvParserStart <T>(CsvMapping <T> csvMapper, string name_file) where T : class, new()
{
    string file = Path + name_file;
    if (!File.Exists(file))
    {
        // Fixed error message grammar ("is not exist" -> "does not exist").
        throw new ApplicationException(String.Format("The file '{0}' does not exist.", file));
    }

    CsvParserOptions csvParserOptions = new CsvParserOptions(true, ';');
    CsvParser <T> csvParser = new CsvParser <T>(csvParserOptions, csvMapper);

    // Materialize all rows, then unwrap the per-row parse results.
    var result = csvParser
        .ReadFromFile(file, Encoding.Default)
        .ToList()
        .ConvertAll(x => x.Result);

    if (result.Count == 0)
    {
        throw new ApplicationException("Cannot load data from csv-file: " + file);
    }

    return((IList <T>)result);
}
public void ClassificationBinomialGradientBoostLearner_MultiClass_Stochastic_FeaturePrSplit_Learn_Indexed()
{
    var parser = new CsvParser(() => new StringReader(Resources.Glass));
    var observations = parser.EnumerateRows(v => v != "Target").ToF64Matrix();
    var targets = parser.EnumerateRows("Target").ToF64Vector();

    var sut = new ClassificationBinomialGradientBoostLearner(30, 0.1, 3, 1, 1e-6, 0.5, 3, false);

    // Train on a deterministically shuffled 70% subset of the rows.
    var indices = Enumerable.Range(0, targets.Length).ToArray();
    indices.Shuffle(new Random(42));
    indices = indices.Take((int)(targets.Length * 0.7)).ToArray();

    var model = sut.Learn(observations, targets, indices);
    var predictions = model.Predict(observations);

    // Error is measured only on the rows that were used for training.
    var trainPredictions = predictions.GetIndices(indices);
    var trainTargets = targets.GetIndices(indices);

    var metric = new TotalErrorClassificationMetric <double>();
    var actual = metric.Error(trainTargets, trainPredictions);

    Assert.AreEqual(0.033557046979865772, actual);
}
public void SkipHeaderTest()
{
    // First input line is a header; skipHeader = true must drop it and
    // parse the remaining two person records.
    var parserOptions = new CsvParserOptions(true, ';');
    var readerOptions = new CsvReaderOptions(new[] { Environment.NewLine });
    var mapping = new CsvPersonMapping();
    var parser = new CsvParser <Person>(parserOptions, mapping);

    var csvData = new StringBuilder()
        .AppendLine("FirstName;LastName;BirthDate")
        .AppendLine("Philipp;Wagner;1986/05/12")
        .AppendLine("Max;Mustermann;2014/01/01")
        .ToString();

    var results = parser.ReadFromString(readerOptions, csvData).ToList();

    Assert.AreEqual(2, results.Count);
    Assert.IsTrue(results.All(x => x.IsValid));

    var first = results[0];
    Assert.AreEqual("Philipp", first.Result.FirstName);
    Assert.AreEqual("Wagner", first.Result.LastName);
    Assert.AreEqual(1, first.RowIndex);
    Assert.AreEqual(1986, first.Result.BirthDate.Year);
    Assert.AreEqual(5, first.Result.BirthDate.Month);
    Assert.AreEqual(12, first.Result.BirthDate.Day);

    var second = results[1];
    Assert.AreEqual("Max", second.Result.FirstName);
    Assert.AreEqual("Mustermann", second.Result.LastName);
    Assert.AreEqual(2, second.RowIndex);
    Assert.AreEqual(2014, second.Result.BirthDate.Year);
    Assert.AreEqual(1, second.Result.BirthDate.Month);
    Assert.AreEqual(1, second.Result.BirthDate.Day);
}
public void LoadFile(string path)
{
    // Read at most `limit` rows into the grid, prefixing each with a
    // right-aligned 1-based row number; a trailing "..." row marks truncation.
    const int limit = 10000;
    var columnsBound = false;

    using (var fileStream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (var streamReader = new StreamReader(fileStream))
    using (var parser = new CsvParser(streamReader))
    {
        var rowNumber = 0;
        while (true)
        {
            var row = parser.Read();
            if (row == null)
            {
                break;
            }

            rowNumber++;
            row = Concat(new[] { rowNumber.ToString().PadLeft(6) }, row);

            // Bind grid columns once, based on the width of the first row.
            if (!columnsBound)
            {
                SetupColumnBinding(row.Length);
                columnsBound = true;
            }

            if (rowNumber > limit)
            {
                Rows.Add(Enumerable.Repeat("...", row.Length).ToArray());
                break;
            }

            Rows.Add(row);
        }
    }
}
public void ClassificationBinomialGradientBoostLearner_Stochastic_Learn_Indexed()
{
    var parser = new CsvParser(() => new StringReader(Resources.AptitudeData));
    var observations = parser.EnumerateRows("AptitudeTestScore", "PreviousExperience_month").ToF64Matrix();
    var targets = parser.EnumerateRows("Pass").ToF64Vector();

    var sut = new ClassificationBinomialGradientBoostLearner(30, 0.1, 3, 1, 1e-6, .5, 0, false);

    // Train on a deterministically shuffled 70% subset of the rows.
    var indices = Enumerable.Range(0, targets.Length).ToArray();
    indices.Shuffle(new Random(42));
    indices = indices.Take((int)(targets.Length * 0.7)).ToArray();

    var model = sut.Learn(observations, targets, indices);
    var predictions = model.Predict(observations);

    // Error is measured only on the rows that were used for training.
    var trainPredictions = predictions.GetIndices(indices);
    var trainTargets = targets.GetIndices(indices);

    var metric = new TotalErrorClassificationMetric <double>();
    var actual = metric.Error(trainTargets, trainPredictions);

    Assert.AreEqual(0.055555555555555552, actual);
}
public void ClassificationExtremelyRandomizedTreesLearner_Learn_Glass_100_Indices()
{
    var parser = new CsvParser(() => new StringReader(Resources.Glass));
    var observations = parser.EnumerateRows(v => v != "Target").ToF64Matrix();
    var targets = parser.EnumerateRows("Target").ToF64Vector();
    // Removed unused local `rows` (was assigned targets.Length and never read).

    var sut = new ClassificationExtremelyRandomizedTreesLearner(100, 1, 100, 1, 0.0001, 1.0, 42, false);

    // Train on a deterministically shuffled 70% subset of the rows.
    var indices = Enumerable.Range(0, targets.Length).ToArray();
    indices.Shuffle(new Random(42));
    indices = indices.Take((int)(targets.Length * 0.7)).ToArray();

    var model = sut.Learn(observations, targets, indices);
    var predictions = model.Predict(observations);

    // NOTE(review): unlike the sibling *_Indexed tests, the error here is
    // evaluated on the FULL data set rather than on `indices` — confirm this
    // is intentional before changing the expected value.
    var evaluator = new TotalErrorClassificationMetric <double>();
    var error = evaluator.Error(targets, predictions);

    Assert.AreEqual(0.14485981308411214, error, 0.0000001);
}
public void ClassificationAdaBoostLearner_Learn_Glass_Indexed()
{
    var parser = new CsvParser(() => new StringReader(Resources.Glass));
    var observations = parser.EnumerateRows(v => v != "Target").ToF64Matrix();
    var targets = parser.EnumerateRows("Target").ToF64Vector();

    var sut = new ClassificationAdaBoostLearner(10, 1, 5);

    // Train on a deterministically shuffled 70% subset of the rows.
    var indices = Enumerable.Range(0, targets.Length).ToArray();
    indices.Shuffle(new Random(42));
    indices = indices.Take((int)(targets.Length * 0.7)).ToArray();

    var model = sut.Learn(observations, targets, indices);
    var predictions = model.Predict(observations);

    // Error is measured only on the rows that were used for training.
    var trainPredictions = predictions.GetIndices(indices);
    var trainTargets = targets.GetIndices(indices);

    var metric = new TotalErrorClassificationMetric <double>();
    var actual = metric.Error(trainTargets, trainPredictions);

    Assert.AreEqual(0.0, actual);
}
private void ParseObjectPerTable(TextReader csvReader, string type = null, string postfix = null)
{
    // Each CSV row is a (field name, field value) pair; collect them into
    // parallel lists until the input is exhausted, then register the object.
    var parser = new CsvParser(csvReader);
    var fieldNames = new List <string>();
    var fieldValues = new List <string>();

    for (var row = parser.Read(); row != null; row = parser.Read())
    {
        fieldNames.Add(row[0]);
        fieldValues.Add(row[1]);
    }

    var instanceName = csv.Utility.FixName(type, postfix);
    _loadedObjects.Add(instanceName, CreateValues(fieldNames, fieldValues, startingIndex: 0));
    _fieldNames = fieldNames.ToArray();
}
public void ParseValid2Fields2RowsBytePositionDifferentCultureTest()
{
    // Byte counting must not depend on the current culture; run under uk-UA
    // and restore the original culture afterwards.
    var savedCulture = CultureInfo.CurrentCulture;
    try
    {
        CultureInfo.CurrentCulture = new CultureInfo("uk-UA");
        using (var stream = new MemoryStream())
        using (var writer = new StreamWriter(stream))
        using (var reader = new StreamReader(stream))
        using (var parser = new CsvParser(reader))
        {
            const string firstRow = "=\"01\",=\"02\"\r\n";
            const string secondRow = "=\"03\",=\"04\"";
            writer.Write(firstRow + secondRow);
            writer.Flush();
            stream.Position = 0;

            parser.Configuration.UseExcelLeadingZerosFormatForNumerics = true;
            parser.Configuration.CountBytes = true;

            parser.Read();
            Assert.AreEqual(Encoding.GetEncoding(0).GetByteCount(firstRow), parser.BytePosition);

            parser.Read();
            Assert.AreEqual(Encoding.GetEncoding(0).GetByteCount(firstRow + secondRow), parser.BytePosition);
        }
    }
    finally
    {
        CultureInfo.CurrentCulture = savedCulture;
    }
}
public void ParseValid2Fields2RowsTest()
{
    // Excel-style ="01" fields must surface as their quoted inner value
    // when UseExcelLeadingZerosFormatForNumerics is enabled.
    using (var stream = new MemoryStream())
    using (var writer = new StreamWriter(stream))
    using (var reader = new StreamReader(stream))
    using (var parser = new CsvParser(reader))
    {
        writer.Write("=\"01\",=\"02\"\r\n");
        writer.Write("=\"03\",=\"04\"\r\n");
        writer.Flush();
        stream.Position = 0;

        parser.Configuration.UseExcelLeadingZerosFormatForNumerics = true;

        var firstRow = parser.Read();
        Assert.AreEqual("01", firstRow[0]);
        Assert.AreEqual("02", firstRow[1]);

        var secondRow = parser.Read();
        Assert.AreEqual("03", secondRow[0]);
        Assert.AreEqual("04", secondRow[1]);
    }
}