// Entry point: load every message from the TSV source and print both reports.
static void Main(string[] args)
{
    var messages = TsvReader.ReadMessage(Path);

    PrintFullInfo(messages);
    PrintDatasetSizeDependentInfo(messages);
}
/// <summary>
/// Verifies cursor semantics of the TsvReader convenience subclass:
/// <c>Current</c> is empty and <c>CurrentIndex</c> is -1 whenever the reader
/// is not positioned on a value, and values come back in record/value order.
/// </summary>
public void TsvConvenienceSubclass()
{
    const int NumRecords = 3;
    const int NumValues = 4;

    var builder = CreateTable(TsvReader.ValueDelimeter, TsvReader.RecordDelimeter, NumRecords, NumValues);
    var reader = new TsvReader(builder.ToString());

    // Before the first MoveNextRecord the cursor is unpositioned.
    Assert.IsTrue(reader.Current.IsEmpty);
    Assert.AreEqual(-1, reader.CurrentIndex);

    for (var record = 0; record < NumRecords; record++)
    {
        Assert.IsTrue(reader.MoveNextRecord());

        // Entering a record does not position the cursor on a value yet.
        Assert.IsTrue(reader.Current.IsEmpty);
        Assert.AreEqual(-1, reader.CurrentIndex);

        for (var column = 0; column < NumValues; column++)
        {
            Assert.IsTrue(reader.MoveNextValue());
            Assert.AreEqual($"Value {record}-{column}", reader.Current.ToString());
        }

        // Moving past the last value of a record resets the cursor.
        Assert.IsFalse(reader.MoveNextValue());
        Assert.IsTrue(reader.Current.IsEmpty);
        Assert.AreEqual(-1, reader.CurrentIndex);
    }

    // Moving past the last record leaves the cursor reset.
    Assert.IsFalse(reader.MoveNextRecord());
    Assert.IsTrue(reader.Current.IsEmpty);
    Assert.AreEqual(-1, reader.CurrentIndex);
}
/// <summary>
/// Scans the downloaded localization sheets and returns every <see cref="Locale"/>
/// that has at least one non-blank translation in any row.
/// </summary>
/// <param name="assets">Sheet assets to inspect (CSV or TSV).</param>
/// <returns>The locales for which any key has a non-empty localization.</returns>
private List<Locale> GetLanguagesInSheets(IEnumerable<LocalizationAsset> assets)
{
    // key name -> localizations, one entry per locale column.
    var localizationsTable = new Dictionary<string, List<string>>();

    foreach (LocalizationAsset asset in assets)
    {
        // Normalize line endings so both parsers only ever see '\n'.
        string text = asset.TextAsset.text.Replace("\r\n", "\n");
        List<List<string>> lines = asset.Format == GoogleDriveDownloadFormat.CSV
            ? CsvReader.Parse(text)
            : TsvReader.Parse(text);

        // Data starts on the line after the "Polyglot" marker row.
        // Guard against completely empty rows, which previously threw on l[0].
        foreach (List<string> line in lines.SkipWhile(l => l.Count == 0 || l[0] != "Polyglot").Skip(1))
        {
            if (line.Count == 0)
            {
                continue;
            }

            string keyName = line[0];
            if (!string.IsNullOrWhiteSpace(keyName) && line.Count > 1)
            {
                // Columns 0 and 1 are the key and description; the rest are locales.
                // The indexer both inserts and overwrites, avoiding the original
                // ContainsKey + Add double lookup.
                localizationsTable[keyName] = line.Skip(2).ToList();
            }
        }
    }

    var presentLanguages = new List<Locale>();
    foreach (int lang in Enum.GetValues(typeof(Locale)))
    {
        // A locale is "present" when any key has a non-blank value in its column.
        foreach (List<string> localizations in localizationsTable.Values)
        {
            if (!string.IsNullOrWhiteSpace(localizations.ElementAtOrDefault(lang)))
            {
                presentLanguages.Add((Locale)lang);
                break;
            }
        }
    }

    return presentLanguages;
}
// Uncomment for performance testing or fuzzing
//[Fact]
//public void TsvReader_Performance()
//{
//    string largeFilePath = "<provide>";
//    int rowCountRead = ParseFile(largeFilePath);
//}

//[Fact]
//public void TsvReader_Fuzz()
//{
//    string folderPath = @"C:\Download";
//    foreach (string filePath in Directory.EnumerateFiles(folderPath, "*.tsv", SearchOption.AllDirectories))
//    {
//        int rowCountRead = ParseFile(filePath);
//    }
//}

/// <summary>
/// Reads every row of the given TSV file and returns how many rows were consumed.
/// </summary>
private int ParseFile(string filePath)
{
    using (TsvReader reader = new TsvReader(filePath))
    {
        // Drain the file; NextRow() returns false at end of input.
        while (reader.NextRow())
        {
        }

        return reader.RowCountRead;
    }
}
/// <summary>
/// Returns the values read for the given input format identifier.
/// </summary>
/// <param name="input">Format name; only "tsv" is currently supported.</param>
/// <returns>The values from the matching reader, or null for unknown formats.</returns>
public static List<string> Get(string input)
{
    if (input == "tsv")
    {
        IReader reader = new TsvReader();
        return reader.Read();
    }

    // Unknown format: callers treat null as "no data".
    return null;
}
/// <summary>
/// Parses a downloaded localization sheet and merges its rows into
/// <c>languageStrings</c>, replacing existing keys. Rows before the
/// "Polyglot"/"PolyMaster"/"BEGIN" marker and after "END" are ignored.
/// </summary>
/// <param name="text">Raw sheet contents.</param>
/// <param name="format">Whether <paramref name="text"/> is CSV or TSV.</param>
private static void ImportTextFile(string text, GoogleDriveDownloadFormat format)
{
    // Normalize line endings so both parsers only ever see '\n'.
    text = text.Replace("\r\n", "\n");
    List<List<string>> rows = format == GoogleDriveDownloadFormat.CSV
        ? CsvReader.Parse(text)
        : TsvReader.Parse(text);

    var canBegin = false;
    for (int rowIndex = 0; rowIndex < rows.Count; rowIndex++)
    {
        var row = rows[rowIndex];
        var key = row[0];
        if (string.IsNullOrEmpty(key) || IsLineBreak(key) || row.Count <= 1)
        {
            // Ignore empty lines in the sheet.
            continue;
        }

        if (!canBegin)
        {
            // Data starts after one of the recognized marker rows.
            if (key == "Polyglot" || key == "PolyMaster" || key == "BEGIN")
            {
                canBegin = true;
            }
            continue;
        }

        if (key == "END")
        {
            break;
        }

        // Drop the key and description columns; the rest are per-language strings.
        // (row.Count > 1 was checked above, so both removals are safe.)
        row.RemoveAt(0);
        row.RemoveAt(0);

        // The indexer inserts or overwrites in a single lookup, replacing the
        // original ContainsKey + Add double lookup.
        languageStrings[key] = row;
    }
}
/// <summary>
/// Opens a data file in the appropriate viewer based on its extension,
/// showing a message box if anything fails.
/// </summary>
void Open(string file)
{
    try
    {
        OutputReaderBase reader;
        switch (Path.GetExtension(file))
        {
            case ".xnc":
            case ".xnmc":
            case ".tsv":
                // Tab-separated variants all share the TSV reader.
                reader = new TsvReader(file);
                OpenReportNode(file, reader);
                break;

            case ".bnc":
                // Binary chart files open in a dedicated chart window.
                NextGenLab.Chart.Chart chart = new NextGenLab.Chart.Chart();
                chart.MdiParent = this;
                chart.Open(file, false);
                chart.Show();
                break;

            case ".bsdr":
                reader = new ORBBinaryReader(file, true);
                OpenReportNode(file, reader);
                break;

            default:
                // Unknown extension: optionally guess a regex-based format,
                // otherwise fall back to the CSV reader.
                if (guessFileFormat)
                {
                    rerd = RegExReaderData.GuessFormat(file);
                }

                if (rerd != null)
                {
                    reader = new ORBRegExReader(file, true, rerd);
                }
                else
                {
                    reader = new ORBCsvReader(file, true);
                }

                OpenReportNode(file, reader);
                break;
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Exception while opening file " + Path.GetFileName(file),
            MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}
// Verifies that ProcessLines parses a header plus one data row into a typed
// collection with correctly converted column values.
// NOTE(review): the verbatim literal below almost certainly contained tab and
// newline separators before whitespace was collapsed in this view; the
// Split('\r') call implies the original used "\r"-terminated lines — confirm
// against the original source before editing the literal.
public void TsvReaderShouldReturnCollection()
{
    var inputData = @"Id Name Price Amount 2 milk 39.99 600.00" ;

    // "" and false are the reader's constructor arguments (path and a flag);
    // their exact semantics are not visible here — the test only exercises
    // ProcessLines on in-memory lines.
    var sut = new TsvReader("", false);

    var actual = sut.ProcessLines(inputData.Split('\r'));

    // All four columns of the single record must round-trip with correct types.
    Assert.AreEqual(2, actual.First().Id);
    Assert.AreEqual(600.00, actual.First().Amount);
    Assert.AreEqual("milk", actual.First().Name);
    Assert.AreEqual(39.99, actual.First().Price);
}
/// <summary>
/// Decompress the raw data and deserialize the records.
/// </summary>
/// <param name="queryPlan">Optional for queries. Filters the records if passed.</param>
internal void DeserializeDecompress(QueryPlan <T> queryPlan = null)
{
    // Nothing to do when this packet carries no compressed payload.
    if (rawCompressedData == null) { return; }

    List <string> header = null;
    TsvMapping <T> tsvMapping = null;

    // Streaming pipeline: in-memory bytes -> gzip decompression -> TSV lines.
    using var ms = new MemoryStream(rawCompressedData);
    using var gzip = new GZipStream(ms, CompressionMode.Decompress);
    using var tsv = new TsvReader(gzip);

    int lineCount = 0;
    Nullable <int> tsvColumnIndex = null;
    bool isFirstLine = true;
    bool notMatching = false;

    while (!tsv.EndOfStream)
    {
        var line = tsv.ReadLine();

        if (isFirstLine)
        {
            // Use its own header for compatibility.
            header = line;
            isFirstLine = false;
            tsvMapping = new TsvMapping <T>(table, header);
            continue;
        }

        // Every data line must have exactly as many columns as the header.
        // NOTE(review): lineCount only counts *stored* records (see increment
        // below), so the line number reported here may understate the actual
        // file line when records were filtered out — confirm intent.
        if (header.Count != line.Count)
        {
            throw new Exception($"Header length and column count mismatch in file '{FullPath}' at line {lineCount + 1}.");
        }

        /*
         * Filtering by 'Where' expressions
         */
        if (queryPlan != null)
        {
            notMatching = false;
            foreach (var filter in queryPlan.FreeIndexFilters)
            {
                // Map the record property to its TSV column; a missing column
                // or a value mismatch disqualifies the line before it is
                // materialized into a record.
                tsvColumnIndex = tsvMapping.FromRecordToTsv[filter.Key];
                if (tsvColumnIndex == null || line[(int)tsvColumnIndex] != filter.Value)
                {
                    notMatching = true;
                    break;
                }
            }
            if (notMatching)
            {
                continue;
            }
        }

        // Materialize the record from the TSV line.
        var record = new T();
        table.LoadFromTSVLine(tsvMapping, record, line);

        /*
         * Filtering by flex expressions
         */
        if (queryPlan != null)
        {
            notMatching = false;
            foreach (var exp in queryPlan.Query.FlexFilters)
            {
                if (!exp(record))
                {
                    notMatching = true;
                    break;
                }
            }
            if (notMatching)
            {
                continue;
            }
        }

        // Index by the table's unique key and keep insertion order in 'lines'.
        string unique = table.GetUnique(record);
        this.data[unique] = record;
        this.lines.Add(record);
        lineCount++;
    }

    /*
     * Sort records inside the packet
     */
    if (queryPlan != null)
    {
        if (queryPlan.FreeSorting.Count > 0)
        {
            var props = table.Properties.ToArray();
            this.lines.Sort((x, y) =>
            {
                // Compare by each sorting directive in turn; Item1 is the
                // property index, Item2 the direction. Later directives only
                // matter when both values are null (the only 'continue' path).
                foreach (var directive in queryPlan.FreeSorting)
                {
                    int dir = directive.Item2 == SortingDirection.Ascending ? 1 : -1;
                    var valueX = props[directive.Item1].GetValue(x);
                    var valueY = props[directive.Item1].GetValue(y);
                    if (valueX == null && valueY == null) { continue; }
                    // Nulls sort toward the start for ascending order.
                    if (valueX == null) { return(-dir); }
                    if (valueY == null) { return(dir); }
                    return(((IComparable)valueX).CompareTo((IComparable)valueY) * dir);
                }
                return(0);
            });
        }
    }
}
/// <summary>
/// Placeholder sync routine: constructs the TSV reader but does not yet push
/// anything to Influx — the write call below is still commented out.
/// NOTE(review): no awaits yet, so the compiler will warn (CS1998) until the
/// write path is implemented.
/// </summary>
private async Task SyncOmniCoreExports()
{
    var reader = new TsvReader(Configuration);

    // InfluxWriteApi.WritePoints(Configuration.InfluxBucket, Configuration.InfluxOrgId, pds);
}
/// <summary>
/// Rebuilds the supported-language list from the given localization sheets:
/// a language is included when any key has a non-empty value in its column.
/// </summary>
/// <param name="assets">Downloaded sheet assets (CSV or TSV).</param>
/// <returns>The languages actually present in the sheets.</returns>
internal List<Language> GetLanguagesInSheets(params LocalizationAsset[] assets)
{
    List<Language> supported = new List<Language>();
    // NOTE(review): this seed list is cleared before the method returns; the
    // call is kept only to preserve the original read of "supportedLanguages"
    // (side effects of GetField, if any, are not visible here).
    supported.AddRange(Localization.Instance.GetField<List<Language>, Localization>("supportedLanguages"));

    // key -> per-language values (key and description columns removed).
    var locTable = new Dictionary<string, List<string>>();
    for (int i = 0; i < assets.Length; i++)
    {
        var asset = assets[i];
        // Normalize line endings once instead of per-branch.
        string text = asset.TextAsset.text.Replace("\r\n", "\n");
        List<List<string>> textData = asset.Format == GoogleDriveDownloadFormat.CSV
            ? CsvReader.Parse(text)
            : TsvReader.Parse(text);

        bool isValid = false;
        for (int a = 0; a < textData.Count; a++)
        {
            List<string> valList = textData[a];
            string keyName = valList[0];
            if (string.IsNullOrEmpty(keyName) || LocalizationImporter.IsLineBreak(keyName) || valList.Count <= 1)
            {
                continue;
            }

            if (!isValid && keyName.StartsWith("Polyglot"))
            {
                // Rows before the "Polyglot" marker are headers/preamble.
                isValid = true;
            }
            else if (isValid)
            {
                // Drop the key and description columns (Count > 1 checked above).
                valList.RemoveAt(0);
                valList.RemoveAt(0);
                // The indexer inserts or overwrites in a single lookup.
                locTable[keyName] = valList;
            }
        }
    }

    // Column index == language id; collect every column with at least one value.
    ISet<int> validLanguages = new HashSet<int>();
    foreach (var value in locTable.Values)
    {
        for (int i = 0; i < value.Count; i++)
        {
            if (!string.IsNullOrEmpty(value.ElementAtOrDefault(i)))
            {
                validLanguages.Add(i);
            }
        }
    }

    // Replace the seed list with the languages actually present.
    // foreach preserves the set's enumeration order while avoiding the
    // original O(n^2) ElementAt(i) loop over the HashSet.
    supported.Clear();
    foreach (int lang in validLanguages)
    {
        supported.Add((Language)lang);
    }
    return supported;
}
/// <summary>
/// Rebuilds the plot model: streams BG values from Nightscout into the first
/// line series, then adds one simulated insulin series per matching pod session.
/// </summary>
private async Task SetModelData()
{
    using var nsReader = new NightscoutReader(App.Configuration);
    var tsvReader = new TsvReader(App.Configuration);

    // Reuse the first series as the BG series, creating it on first run.
    var s = Model1.Series.FirstOrDefault() as LineSeries;
    if (s == null)
    {
        s = new LineSeries() { YAxisKey = "bg", Title = "BG", Color = OxyColor.FromRgb(255, 0, 0) };
        Model1.Series.Add(s);
    }
    s.Points.Clear();

    //var last = 0d;
    await foreach (var gv in nsReader.BgValues(Start, End))
    {
        // NOTE(review): plotted as (400 - value), apparently to flip the BG
        // axis — confirm against the model's axis configuration.
        s.Points.Add(new DataPoint(DateTimeAxis.ToDouble(gv.Time.LocalDateTime), 400 - gv.Value));
        //if (last != 0d)
        //    BgSeriesDiff.Points.Add(new DataPoint(DateTimeAxis.ToDouble(gv.Time.LocalDateTime), (last - gv.Value)*GvFactor));
        //last = gv.Value;
    }

    // Drop every series except the BG series before re-adding simulations.
    while (Model1.Series.Count > 1)
    {
        Model1.Series.RemoveAt(1);
    }

    await foreach (var ps in tsvReader.PodSessions(Start, End))
    {
        // Only insulin (aspart) sessions are simulated and plotted.
        if (ps.Hormone == HormoneType.InsulinAspart)
        {
            var simulationSerie = new LineSeries() { YAxisKey = "insulin", Color = OxyColor.FromRgb(35, 215, 255) };
            foreach (var iv in InsulinModel.Run(ps, TimeSpan.FromHours(12)))
            {
                // SimulationShift offsets the simulated curve along the time axis.
                simulationSerie.Points.Add(new DataPoint(
                    DateTimeAxis.ToDouble(iv.To.AddMinutes(SimulationShift).LocalDateTime),
                    Axis.ToDouble(iv.Value)));
            }
            Model1.Series.Add(simulationSerie);

            // Kept for reference: alternative model and delivery-rate series.
            //var simulationSerie = new LineSeries()
            //{
            //    YAxisKey = "insulin",
            //    Color = OxyColor.FromRgb(35, 215, 255)
            //};
            //foreach (var iv in InsulinModel2.Run(ps, TimeSpan.FromMinutes(25)))
            //{
            //    simulationSerie.Points.Add(new DataPoint(
            //        DateTimeAxis.ToDouble(iv.Date.AddMinutes(SimulationShift).LocalDateTime),
            //        Axis.ToDouble(iv.Value)));
            //}
            //Model1.Series.Add(simulationSerie);

            //var deliverySeries = new LineSeries()
            //{
            //    YAxisKey = "insulin",
            //    Color = OxyColor.FromRgb(0,128,255),
            //    LineStyle = LineStyle.Dash
            //};
            //decimal? previous = null;
            //TimeSpan tv = TimeSpan.FromMinutes(25);
            //foreach (var fv in ps.GetRates())
            //{
            //    //Debug.WriteLine($"{fv.From}\t{fv.To}\t{fv.Value}");
            //    if (previous.HasValue)
            //        deliverySeries.Points.Add(new DataPoint(DateTimeAxis.ToDouble(fv.Time.LocalDateTime + tv),
            //            Axis.ToDouble(previous.Value)));
            //    deliverySeries.Points.Add(new DataPoint(DateTimeAxis.ToDouble(fv.Time.LocalDateTime + tv),
            //        Axis.ToDouble(fv.Value)));
            //    previous = fv.Value;
            //    //deliverySeries.Points.Add(new DataPoint(DateTimeAxis.ToDouble(fv.To.LocalDateTime),
            //    //    Axis.ToDouble(fv.Value)));
            //}
            //deliverySeries.Points.Add(new DataPoint(DateTimeAxis.ToDouble(End.LocalDateTime + tv),
            //    Axis.ToDouble(previous.Value)));
            //Model1.Series.Add(deliverySeries);
        }
    }
}
/// <summary>
/// Loads all artists from the given TSV file.
/// NOTE(review): the 'true' flag's meaning is defined by TsvReader.Read
/// (presumably "first row is a header") — confirm there.
/// </summary>
public List<Artist> GetArtists(string fileName) => TsvReader.Read<Artist>(fileName, true);
// Exercises TsvReader end-to-end over in-memory streams with a deliberately
// small buffer (64 bytes) so buffer-refill and buffer-resize code paths are hit.
public void TsvReader_Basics()
{
    int bufferSize = 64;

    // Empty file - verify no rows
    using (TsvReader reader = new TsvReader(StreamFromString(""), bufferSize))
    {
        Assert.False(reader.NextRow());

        // Verify double-dispose causes no problem
        reader.Dispose();
    }

    // Single row file, no trailing newline
    using (TsvReader reader = new TsvReader(StreamFromString("One\tTwo\tThree"), bufferSize))
    {
        // RowCountRead only advances once a row is actually consumed.
        Assert.Equal(0, reader.RowCountRead);
        Assert.True(reader.NextRow());
        Assert.Equal(1, reader.RowCountRead);
        Assert.Equal("One|Two|Three", string.Join("|", reader.Current()));
        Assert.False(reader.NextRow());
        Assert.Equal(1, reader.RowCountRead);
    }

    // Empty values (leading, middle-adjacent, and trailing) are preserved.
    using (TsvReader reader = new TsvReader(StreamFromString("\tValue\t\t"), bufferSize))
    {
        Assert.True(reader.NextRow());
        Assert.Equal("|Value||", string.Join("|", reader.Current()));
        Assert.False(reader.NextRow());
    }

    // Newline variation ('\n' vs "\r\n") and trailing newline
    using (TsvReader reader = new TsvReader(StreamFromString("One\nTwo\r\nThree\r\n"), bufferSize))
    {
        Assert.True(reader.NextRow());
        Assert.Equal("One", string.Join("|", reader.Current()));
        Assert.True(reader.NextRow());
        Assert.Equal("Two", string.Join("|", reader.Current()));
        Assert.True(reader.NextRow());
        Assert.Equal("Three", string.Join("|", reader.Current()));
        Assert.False(reader.NextRow());
    }

    // Row requiring a buffer resize and verify nothing is missed
    string oneHundredColumns = string.Join("\t", Enumerable.Range(100, 100).Select(i => i.ToString()));
    using (TsvReader reader = new TsvReader(StreamFromString(oneHundredColumns), bufferSize))
    {
        Assert.True(reader.NextRow());
        Assert.Equal(100, reader.Current().Count);
        Assert.False(reader.NextRow());
    }

    // Value exactly 2x buffer, requiring two buffer resizes to be read
    string valueRequiringBufferExpand = new string('0', 128);
    using (TsvReader reader = new TsvReader(StreamFromString($"One\tTwo\tThree\r\nSecond\tRow\r\n{valueRequiringBufferExpand}"), bufferSize))
    {
        Assert.True(reader.NextRow());
        Assert.Equal("One|Two|Three", string.Join("|", reader.Current()));
        Assert.True(reader.NextRow());
        Assert.Equal("Second|Row", string.Join("|", reader.Current()));
        Assert.True(reader.NextRow());
        Assert.Single(reader.Current());
        Assert.Equal(valueRequiringBufferExpand, reader.Current()[0]);
    }

    // '\r' exactly at buffer boundary, requiring refill to track the unread '\n' to ignore
    using (TsvReader reader = new TsvReader(StreamFromString($"{new string('0', 63)}\r\nNextRow\r\n"), bufferSize))
    {
        Assert.True(reader.NextRow());
        Assert.Equal(new string('0', 63), string.Join("|", reader.Current()));
        Assert.True(reader.NextRow());
        Assert.Equal("NextRow", string.Join("|", reader.Current()));
        Assert.False(reader.NextRow());
    }
}