/// <summary>
/// Downloads the IEEE OUI vendor CSV, parses it with TinyCsvParser and stores the
/// MAC vendor records in the local database, then publishes a <see cref="DownloadEvent"/>
/// with the outcome and closes the popup.
/// </summary>
/// <remarks>
/// async void is tolerated here only because this is a command handler; all exceptions
/// are caught locally so none can go unobserved.
/// </remarks>
private async void ExecuteDownloadData()
{
    var eventResponse = "";
    try
    {
        // Alternative mock endpoint: http://demo0369881.mockable.io/macdata
        var csvraw = await App.GetCSV("https://standards.ieee.org/develop/regauth/oui/oui.csv");

        // RFC 4180 setup: " quotes, \ escapes, , delimits; skip the header row.
        var options = new Options('"', '\\', ',');
        var tokenizer = new RFC4180Tokenizer(options);
        CsvParserOptions csvParserOptions = new CsvParserOptions(true, tokenizer);
        OUIObjectMap csvMapper = new OUIObjectMap();
        CsvParser<Mac> csvParser = new CsvParser<Mac>(csvParserOptions, csvMapper);
        var opts = new CsvReaderOptions(new[] { Environment.NewLine });

        var result = csvParser.ReadFromString(opts, csvraw);

        System.Diagnostics.Debug.WriteLine("Writing to db...");
        App.db.BeginTransaction();
        try
        {
            foreach (var mac in result)
            {
                App.db.InsertOrReplace(mac.Result);
            }
            App.db.Commit();
        }
        catch
        {
            // BUG FIX: a failed insert previously left the transaction open
            // (locked/partial db). Roll back before surfacing the failure.
            App.db.Rollback();
            throw;
        }
        System.Diagnostics.Debug.WriteLine("Writing to db OK");

        eventResponse = DownloadEvent.EVENT_SUCCESS;
    }
    catch (Exception e)
    {
        // BUG FIX: the exception was caught but never logged, hiding the cause.
        System.Diagnostics.Debug.WriteLine("Download failed: " + e);
        eventResponse = DownloadEvent.EVENT_FAILED;
    }
    _eventAggregator.GetEvent<DownloadEvent>().Publish(eventResponse);
    await _navigationService.ClearPopupStackAsync();
}
/// <summary>
/// Builds an RFC 4180 compliant CSV parser for <see cref="CatalogItem"/> rows.
/// </summary>
/// <returns>A configured parser that skips the header line.</returns>
public static CsvParser<CatalogItem> CreateParser()
{
    // " quotes, \ escapes, , delimits.
    var tokenizer = new RFC4180Tokenizer(new Options('"', '\\', ','));

    // true => the first row is a header and is skipped.
    var parserOptions = new CsvParserOptions(true, tokenizer);

    return new CsvParser<CatalogItem>(parserOptions, new CsvCatalogItemMapper());
}
/// <summary>
/// Creates a ready-to-use <see cref="Parser{TEntity}"/> wrapping a TinyCsvParser
/// configured for RFC 4180 input with "//" comment lines and both CRLF and LF row endings.
/// </summary>
/// <typeparam name="TEntity">Entity type a CSV row maps to.</typeparam>
/// <typeparam name="TMapping">Column mapping for <typeparamref name="TEntity"/>.</typeparam>
/// <param name="mapping">The mapping instance used to materialize rows.</param>
public static Parser<TEntity> Create<TEntity, TMapping>(TMapping mapping)
    where TEntity : class, new()
    where TMapping : CsvMapping<TEntity>
{
    var tokenizer = new RFC4180Tokenizer(new Options('"', '\\', ','));

    // Skip the header row and ignore lines that start with "//".
    var parserOptions = new CsvParserOptions(true, "//", tokenizer);

    // Accept Windows and Unix line endings alike.
    var readerOptions = new CsvReaderOptions(new[] { "\r\n", "\n" });

    var csvParser = new CsvParser<TEntity>(parserOptions, mapping);
    return new Parser<TEntity>(csvParser, readerOptions);
}
/// <summary>
/// Downloads and parses the CSV at <paramref name="url"/> into valid <see cref="Location"/> records.
/// </summary>
/// <param name="url">Address of the CSV resource; rows are separated by '\n'.</param>
/// <returns>The successfully mapped locations; rows that failed to map are dropped.</returns>
private IEnumerable<Location> Get(string url)
{
    var parserOptions = new CsvParserOptions(true, new RFC4180Tokenizer(new Options('"', '\\', ',')));
    var parser = new CsvParser<Location>(parserOptions, new LocationCsvMapping());

    // Materialize once, then keep only the rows that parsed cleanly.
    var rows = parser
        .ReadFromUrl(new CsvReaderOptions("\n"), url)
        .ToList();

    return rows
        .Where(row => row.IsValid)
        .Select(row => row.Result);
}
/// <summary>
/// Gets the data in the configuration file.
/// </summary>
/// <returns>The parsed configuration entries, or an empty sequence when the file is missing.</returns>
/// <exception cref="Exception">Wraps any IO or parsing failure while loading the file.</exception>
public IEnumerable<IConfigData> LoadConfig()
{
    if (!File.Exists(filePath))
    {
        LogHelper.Warn(typeof(IPConfigService), "Config file couldn't be found.");
        return Enumerable.Empty<IpConfigData>();
    }

    var lines = new List<IpConfigData>();
    try
    {
        // RFC 4180 setup: " quotes, \ escapes, , delimits; first row is a header.
        var options = new Options('"', '\\', ',');
        var tokenizer = new RFC4180Tokenizer(options);
        CsvParserOptions csvParserOptions = new CsvParserOptions(skipHeader: true, tokenizer: tokenizer);
        CsvIpConfigDataMapping csvMapper = new CsvIpConfigDataMapping();
        CsvParser<IpConfigData> csvParser = new CsvParser<IpConfigData>(csvParserOptions, csvMapper);

        var data = csvParser.ReadFromFile(filePath, Encoding.UTF8).ToList();
        foreach (var line in data)
        {
            if (line.IsValid)
            {
                lines.Add(line.Result);
            }
            else
            {
                // Keep going on bad rows: one malformed entry should not lose the rest.
                LogHelper.Warn(typeof(IPConfigService), "Error parsing IpRestrictor configuration item.");
            }
        }
    }
    catch (Exception ex)
    {
        // BUG FIX: this method LOADS the configuration, but the old message said
        // "Error saving configuration", which misdirected anyone reading the logs.
        LogHelper.Error(typeof(IPConfigService), "Error loading configuration", ex);
        throw new Exception("Error loading configuration", ex);
    }
    return lines.ToArray();
}
/// <summary>
/// Two leading empty columns followed by a value must tokenize into exactly
/// three tokens: "", "" and "a".
/// </summary>
public void All_Empty_Last_Column_Not_Empty_Test()
{
    // RFC 4180 setup: " quotes, \ escapes, , delimits.
    var tokenizer = new RFC4180Tokenizer(new Options('"', '\\', ','));

    var input = ",,a";

    var result = tokenizer.Tokenize(input);

    Assert.IsNotNull(result);
    Assert.AreEqual(3, result.Length);
    Assert.AreEqual("", result[0]);
    Assert.AreEqual("", result[1]);
    Assert.AreEqual("a", result[2]);
}
/// <summary>
/// Regression test for issue #3: an empty (whitespace-only) middle column between
/// two quoted columns must yield an empty token, not be swallowed.
/// </summary>
public void Rfc4180_Issue3_Empty_Column_Test()
{
    // RFC 4180 setup: " quotes, \ escapes, , delimits.
    var tokenizer = new RFC4180Tokenizer(new Options('"', '\\', ','));

    var input = "\"Robert, Willliamson\", , \"All-around nice guy who always says hi\"";

    var result = tokenizer.Tokenize(input);

    Assert.IsNotNull(result);
    Assert.AreEqual(3, result.Length);
    Assert.AreEqual("Robert, Willliamson", result[0]);
    Assert.AreEqual("", result[1]);
    Assert.AreEqual("All-around nice guy who always says hi", result[2]);
}
/// <summary>
/// End-to-end check: the RFC 4180 tokenizer plugged into the CsvParser must
/// handle embedded commas, doubled quotes and empty columns across whole rows.
/// </summary>
public void RFC4180_CsvParser_Integration_Test()
{
    // RFC 4180 setup: " quotes, \ escapes, , delimits.
    var tokenizer = new RFC4180Tokenizer(new Options('"', '\\', ','));

    // Skip the header row:
    var parserOptions = new CsvParserOptions(true, tokenizer);
    var parser = new CsvParser<SampleEntity>(parserOptions, new SampleEntityMapping());

    var csv = new StringBuilder()
        .AppendLine("Name, Age, Description")
        .AppendLine("\"Michael, Chester\",24,\"Also goes by \"\"Mike\"\", among friends that is\"")
        .AppendLine("\"Robert, Willliamson\", , \"All-around nice guy who always says hi\"")
        .ToString();

    // Rows are separated by the platform newline:
    var readerOptions = new CsvReaderOptions(new[] { Environment.NewLine });

    var result = parser
        .ReadFromString(readerOptions, csv)
        .ToList();

    Assert.AreEqual(2, result.Count);
    Assert.AreEqual(true, result.All(x => x.IsValid));

    // Row 1: comma inside quotes and "" unescaped to a single quote.
    Assert.AreEqual("Michael, Chester", result[0].Result.Name);
    Assert.AreEqual(24, result[0].Result.Age);
    Assert.AreEqual("Also goes by \"Mike\", among friends that is", result[0].Result.Description);

    // Row 2: empty Age column maps to a missing value.
    Assert.AreEqual("Robert, Willliamson", result[1].Result.Name);
    Assert.AreEqual(false, result[1].Result.Age.HasValue);
    Assert.AreEqual("All-around nice guy who always says hi", result[1].Result.Description);
}
/// <summary>
/// A quoted field containing a doubled quote ("") must tokenize to a single
/// embedded quote, and commas inside quotes must not split the field.
/// </summary>
public void Rfc4180_QuotedString_Double_Quoted_Data_Test()
{
    // RFC 4180 setup: " quotes, \ escapes, , delimits.
    var tokenizer = new RFC4180Tokenizer(new Options('"', '\\', ','));

    var input = "\"Michael, Chester\",24,\"Also goes by \"\"Mike\"\", among friends that is\"";

    var result = tokenizer.Tokenize(input);

    Assert.IsNotNull(result);
    Assert.AreEqual(3, result.Length);
    Assert.AreEqual("Michael, Chester", result[0]);
    Assert.AreEqual("24", result[1]);
    Assert.AreEqual("Also goes by \"Mike\", among friends that is", result[2]);
}
/// <summary>
/// Downloads the COVID-19 time series (confirmed / deaths / recovered), validates that
/// the three files line up, and yields one <see cref="Observation"/> per location per timestamp.
/// </summary>
/// <returns>A lazy sequence of observations; locations missing from any series are skipped.</returns>
/// <exception cref="Exception">Thrown when the three files have different headers or row counts.</exception>
public IEnumerable<Observation> GetObservations()
{
    // NOTE(review): blocking on .Result keeps this method's synchronous contract,
    // but an async variant would avoid potential thread-pool starvation — TODO confirm callers.
    var confirmed = GetConfirmedCasesFromGithubAsync().Result
        .Split(new[] { '\n' }, StringSplitOptions.None)
        .ToList();

    var deaths = GetDeathCasesFromGithubAsync().Result
        .Split(new[] { '\n' }, StringSplitOptions.None)
        .ToList();

    var recovered = GetRecoveredCasesFromGithubAsync().Result
        .Split(new[] { '\n' }, StringSplitOptions.None)
        .ToList();

    // Make sure all data has the same header, so the Timestamps match:
    if (!new[] { deaths[0], recovered[0] }.All(x => string.Equals(x, confirmed[0], StringComparison.InvariantCulture)))
    {
        throw new Exception($"Different Headers (Confirmed = {confirmed[0]}, Deaths = {deaths[0]}, Recovered = {recovered[0]}");
    }

    // Make sure all data has the same number of rows, or we can stop here:
    if (!new[] { deaths.Count, recovered.Count }.All(x => x == confirmed.Count))
    {
        throw new Exception($"Different Number of Rows (Confirmed = {confirmed.Count}, Deaths = {deaths.Count}, Recovered = {recovered.Count}");
    }

    var tokenizer = new RFC4180Tokenizer(new Options('"', '\\', ','));

    // Header row fixes the column layout; columns 0-3 are Province, Country, Lat, Lon.
    var header = tokenizer.Tokenize(confirmed[0])
        .ToArray();

    // Every column after the fourth is one observation timestamp:
    var observationDateTimes = header
        .Skip(4)
        .Select(x => DateTime.Parse(x, CultureInfo.InvariantCulture))
        .ToArray();

    // Index each series by "Province,Country" so the three files can be joined:
    var confirmedLookup = BuildLocationLookup(confirmed, tokenizer, header.Length);
    var deathsLookup = BuildLocationLookup(deaths, tokenizer, header.Length);
    var recoveredLookup = BuildLocationLookup(recovered, tokenizer, header.Length);

    // Get all keys we want to iterate over:
    var keys = confirmedLookup.Keys
        .Concat(deathsLookup.Keys)
        .Concat(recoveredLookup.Keys)
        .Distinct()
        .ToList();

    foreach (var key in keys)
    {
        // BUG FIX: keys is the UNION of three dictionaries, but the old code
        // indexed all three with lookup[key] — any location present in one file
        // but not another threw KeyNotFoundException. Skip incomplete locations.
        if (!confirmedLookup.TryGetValue(key, out var confirmedValues)
            || !deathsLookup.TryGetValue(key, out var deathValues)
            || !recoveredLookup.TryGetValue(key, out var recoveredValues))
        {
            continue;
        }

        for (int timeStepIdx = 0; timeStepIdx < observationDateTimes.Length; timeStepIdx++)
        {
            yield return new Observation
            {
                Province = confirmedValues[0],
                Country = confirmedValues[1],
                Lat = double.Parse(confirmedValues[2].Trim(), CultureInfo.InvariantCulture),
                Lon = double.Parse(confirmedValues[3].Trim(), CultureInfo.InvariantCulture),
                Timestamp = observationDateTimes[timeStepIdx],
                Confirmed = GetCountSafe(confirmedValues[timeStepIdx + 4]),
                Deaths = GetCountSafe(deathValues[timeStepIdx + 4]),
                Recovered = GetCountSafe(recoveredValues[timeStepIdx + 4])
            };
        }
    }
}

// Tokenizes the data rows (header skipped) and keys them by "Province,Country".
// Rows whose column count doesn't match the header are discarded.
private static Dictionary<string, string[]> BuildLocationLookup(List<string> rows, RFC4180Tokenizer tokenizer, int expectedColumns)
{
    return rows.Skip(1)
        .Select(row => tokenizer.Tokenize(row))
        .Where(tokens => tokens.Length == expectedColumns)
        .ToDictionary(tokens => $"{tokens[0]},{tokens[1]}", tokens => tokens);
}