/// <summary>
/// Parses every CSV record from <paramref name="stream"/> using Sylvan's CsvDataReader.
/// Field strings are deduplicated through a StringPool to cut allocations.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    // 64 should fully cover the values in the dataset.
    var pool = new StringPool(64);

    using (var textReader = new StreamReader(stream))
    {
        var readerOptions = new CsvDataReaderOptions
        {
            HasHeaders = false,
            BufferSize = 0x10000,
            StringFactory = pool.GetString,
        };

        var dataReader = CsvDataReader.Create(textReader, readerOptions);
        while (dataReader.Read())
        {
            var item = factory();
            item.Read(i => dataReader.GetString(i));
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// GameOnOnGameLoad - Load Every Plugin/Addon
/// </summary>
/// <param name="args">Game-load event arguments (not used by this handler).</param>
private static void GameOnOnGameLoad(EventArgs args)
{
    #region Subscriptions
    // Announce successful load and the latest news in the in-game chat.
    Game.PrintChat(
        "<font color='#0993F9'>[SurvivorSeries AIO]</font> <font color='#FF8800'>Successfully Loaded.</font>");
    Game.PrintChat("<font color='#b756c5'>[SurvivorSeries] NEWS: </font>" + SSNews);
    #endregion

    // The root menu is created before anything below attaches sub-menus to it.
    RootMenu = new RootMenu("SurvivorSeries AIO");

    #region Utility Loads
    // NOTE(review): order looks significant — SpellCast.RootConfig is assigned
    // before the champion-specific factories run; confirm before reordering.
    new VersionCheck.VersionCheck().UpdateCheck();
    SpellCast.RootConfig = RootMenu;
    // Each factory keys off the local player's champion name.
    ChampionFactory.Load(ObjectManager.Player.ChampionName, RootMenu);
    ActivatorFactory.Create(ObjectManager.Player.ChampionName, RootMenu);
    AutoLevelerFactory.Create(ObjectManager.Player.ChampionName, RootMenu);
    #endregion
}
/// <summary>
/// Parses every CSV record using ComLib's CSV parser. The whole stream is read
/// into one string first, because the library parses from text.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records; empty when the stream is empty.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    string content;
    using (var reader = new StreamReader(stream))
    {
        content = reader.ReadToEnd();
    }

    if (string.IsNullOrEmpty(content))
    {
        return records;
    }

    var doc = ComLib.CsvParse.Csv.LoadText(content, false);
    foreach (var row in doc.Parse())
    {
        var item = factory();
        item.Read(i => row[i]);
        records.Add(item);
    }

    return records;
}
/// <summary>
/// Parses every CSV record using Microsoft.Data.Analysis DataFrame.
/// </summary>
/// <remarks>
/// Only works for data with exactly 25 columns: the column types must be supplied
/// up front (via <c>types</c>), because letting the loader guess types breaks the
/// round-trip — converting guessed values back to string does not preserve the
/// exact original text. Needing the column count for the schema means this
/// adapter is not general-purpose for any T.
/// </remarks>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    DataFrame frame;
    try
    {
        frame = DataFrame.LoadCsv(stream, header: false, guessRows: 0, dataTypes: types);
    }
    catch (FormatException e) when (e.Message == "Empty file")
    {
        // LoadCsv throws rather than returning an empty frame for empty input.
        return records;
    }

    foreach (var row in frame.Rows)
    {
        var item = factory();
        item.Read(i => row[i].ToString());
        records.Add(item);
    }

    return records;
}
/// <summary>
/// Parses every CSV record using the FileHelpers async engine.
/// </summary>
/// <remarks>
/// Bit of a hack: FileHelpers binds to a concrete record type, so this adapter
/// only works when T is PackageAsset. Copying fields out of PackageAssetData
/// afterwards looks slow, but it measured faster than having FileHelpers bind
/// directly to PackageAsset.
/// </remarks>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var reader = new StreamReader(stream))
    {
        var engine = new global::FileHelpers.FileHelperAsyncEngine<PackageAssetData>();
        using (engine.BeginReadStream(reader))
        {
            foreach (var parsed in engine)
            {
                var item = factory();
                item.Read(i => parsed.GetString(i));
                records.Add(item);
            }
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record using ChoETL's CSV reader exposed as an IDataReader.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var activate = ActivatorFactory.Create<T>(_activationMethod);
    var allRecords = new List<T>();

    var config = new ChoETL.ChoCSVRecordConfiguration
    {
        FileHeaderConfiguration = new ChoETL.ChoCSVFileHeaderConfiguration
        {
            // The dataset has no header row; treat the first line as data.
            HasHeaderRecord = false,
        },
    };

    using (var reader = new StreamReader(stream))
    using (var csvReader = new global::ChoETL.ChoCSVReader(reader, config).AsDataReader())
    {
        // Fix: the original kept a `count` local that was incremented on every
        // row but never read — dead code, removed.
        while (csvReader.Read())
        {
            var record = activate();
            record.Read(i => csvReader.GetString(i));
            allRecords.Add(record);
        }
    }

    return allRecords;
}
/// <summary>
/// Parses every CSV record using Sky.Data.Csv, iterating the reader directly.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    // Matches the original: the reader is not explicitly disposed here.
    foreach (var row in Sky.Data.Csv.CsvReader.Create(stream))
    {
        var item = factory();
        item.Read(i => row[i]);
        records.Add(item);
    }

    return records;
}
/// <summary>
/// Parses every CSV record using FastCsvParser, decoding the stream as UTF-8.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var csv = new global::FastCsvParser.CsvReader(stream, Encoding.UTF8))
    {
        while (csv.MoveNext())
        {
            var item = factory();
            item.Read(i => csv.Current[i]);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record via an F# CSV library's Table API (column-major access).
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var activate = ActivatorFactory.Create<T>(_activationMethod);
    using (var reader = new StreamReader(stream))
    {
        // F# interop: the per-column type chooser (Types) is wrapped in an
        // FSharpOption of FSharpFunc as required by ReadSettings.
        // NOTE(review): Types presumably forces every column to string so the
        // round-trip preserves the raw text — confirm against its definition.
        var cols = new FSharpOption<FSharpFunc<Tuple<int, string>, FSharpOption<Type>>>(new Types());
        // NOTE(review): the two bool flags and the None argument map to header/
        // quoting/row-limit settings — confirm against the ReadSettings signature.
        var table = Table.Load(reader, new ReadSettings(Delimiter.Comma, false, false, FSharpOption<int>.None, cols));
        // Row count is known after the eager load, so the list can be pre-sized.
        var allRecords = new List<T>(table.RowsCount);
        for (int r = 0; r < table.RowsCount; r++)
        {
            var item = activate();
            // Column-major: table[i] selects the column, then row r within it.
            item.Read(i => table[i].Rows.Item(r).AsString);
            allRecords.Add(item);
        }
        return (allRecords);
    }
}
/// <summary>
/// Parses every CSV record by streaming lines through the ParseCsv extension.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var reader = new StreamReader(stream))
    {
        foreach (var row in EnumerateLines(reader).ParseCsv())
        {
            var item = factory();
            item.Read(i => row[i]);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record using SoftCircuits.CsvParser.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var csv = new SoftCircuits.CsvParser.CsvReader(stream))
    {
        // ReadRow reuses (and resizes) the row array across iterations.
        string[] row = null;
        while (csv.ReadRow(ref row))
        {
            var item = factory();
            item.Read(i => row[i]);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record using TxtCsvHelper, splitting one physical line at a time.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var reader = new StreamReader(stream))
    using (var parser = new global::TxtCsvHelper.Parser())
    {
        while (reader.Peek() >= 0)
        {
            var fields = parser.MixedSplit(reader.ReadLine()).ToList();
            var item = factory();
            item.Read(i => fields[i]);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record using the LumenWorks (Fast CSV Reader) parser.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var reader = new StreamReader(stream))
    using (var csv = new LumenWorks.Framework.IO.Csv.CsvReader(reader, hasHeaders: false))
    {
        while (csv.ReadNextRecord())
        {
            var item = factory();
            item.Read(i => csv[i]);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record using NReco.Csv.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var reader = new StreamReader(stream))
    {
        var csv = new NReco.Csv.CsvReader(reader);
        while (csv.Read())
        {
            var item = factory();
            item.Read(i => csv[i]);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record using ML.NET's TextLoader.
/// </summary>
/// <remarks>
/// ML.NET only loads from a file path, so the stream is first materialized to a
/// local cache file; the file name embeds the stream length so different-sized
/// inputs get distinct files. The copy runs once, during the first warmup pass.
/// The schema is hard-coded to 25 string columns, so this adapter is not
/// general-purpose for any T.
/// </remarks>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    // this library only allows loading from a file.
    // so write to a local file, use the length of the memory stream
    // to write to a different file based on the input data
    // this will be executed during the first "warmup" run
    var file = "data" + stream.Length + ".csv";
    if (!File.Exists(file))
    {
        using var data = File.Create(file);
        stream.CopyTo(data);
    }

    var activate = ActivatorFactory.Create<T>(_activationMethod);
    var allRecords = new List<T>();
    var mlc = new MLContext();
    // NOTE(review): this reader is never read from — data is loaded from `file`
    // below — but disposing it also disposes `stream`; confirm whether callers
    // rely on that disposal before removing it.
    using (var reader = new StreamReader(stream))
    {
        // 25 string columns, named "0".."24", mapped positionally.
        var schema = new TextLoader.Column[25];
        for (int i = 0; i < schema.Length; i++)
        {
            schema[i] = new TextLoader.Column("" + i, DataKind.String, i);
        }
        var opts = new TextLoader.Options()
        {
            HasHeader = false,
            Separators = new[] { ',' },
            Columns = schema
        };
        var l = mlc.Data.LoadFromTextFile(file, opts);
        var rc = l.GetRowCursor(l.Schema);
        var cols = l.Schema.ToArray();
        // One by-ref getter delegate per column, yielding the raw field text.
        var getters = cols.Select(c => rc.GetGetter<ReadOnlyMemory<char>>(c)).ToArray();
        while (rc.MoveNext())
        {
            var record = activate();
            record.Read(i =>
            {
                // `null` converts to an empty ReadOnlyMemory via the implicit
                // array conversion; the getter then overwrites it by ref.
                ReadOnlyMemory<char> s = null;
                getters[i](ref s);
                return (s.ToString());
            });
            allRecords.Add(record);
        }
    }

    return (allRecords);
}
/// <summary>
/// Parses every CSV record using the NotVisualBasic CsvTextFieldParser.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var reader = new StreamReader(stream))
    {
        var parser = new NotVisualBasic.FileIO.CsvTextFieldParser(reader);
        while (!parser.EndOfData)
        {
            var row = parser.ReadFields();
            var item = factory();
            item.Read(i => row[i]);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record using the project's own CsvUtility line reader.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    // Both buffers are reused across rows to avoid per-row allocations.
    var fieldBuffer = new List<string>();
    var scratch = new StringBuilder();

    using (var reader = new StreamReader(stream))
    {
        while (CsvUtility.TryReadLine(reader, fieldBuffer, scratch))
        {
            var item = factory();
            item.Read(i => fieldBuffer[i]);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record with the naive baseline: split each line on commas.
/// No quote or escape handling — only valid for data with no embedded commas.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var reader = new StreamReader(stream))
    {
        for (var line = reader.ReadLine(); line != null; line = reader.ReadLine())
        {
            var fields = line.Split(',');
            var item = factory();
            item.Read(i => fields[i]);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record using FlatFiles' SeparatedValueReader.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var reader = new StreamReader(stream))
    {
        var csv = new global::FlatFiles.SeparatedValueReader(reader);
        while (csv.Read())
        {
            var values = csv.GetValues();
            var item = factory();
            // Null values become empty strings for consistency with the other adapters.
            item.Read(i => values[i]?.ToString() ?? string.Empty);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record using FluentCsv's RFC 4180 column splitter, one
/// physical line at a time.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var reader = new StreamReader(stream))
    {
        var splitter = new global::FluentCsv.CsvParser.Splitters.Rfc4180DataSplitter();
        for (var line = reader.ReadLine(); line != null; line = reader.ReadLine())
        {
            // NOTE(review): splitting per physical line means quoted fields that
            // span newlines are not handled — confirm the dataset contains none.
            var fields = splitter.SplitColumns(line, ",");
            var item = factory();
            item.Read(i => fields[i]);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record using the VB.NET TextFieldParser.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var parser = new TextFieldParser(stream))
    {
        parser.Delimiters = new[] { "," };
        while (!parser.EndOfData)
        {
            var row = parser.ReadFields();
            var item = factory();
            item.Read(i => row[i]);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record by iterating the reader's data records directly.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var activate = ActivatorFactory.Create<T>(_activationMethod);
    var allRecords = new List<T>();

    // Fix: the original allocated a StringPool(128) here that was never used by
    // the reader or the loop — dead code, removed.
    using (var reader = new StreamReader(stream))
    using (var csvReader = new CsvReader(reader))
    {
        while (csvReader.HasMoreRecords)
        {
            var row = csvReader.ReadDataRecord();
            var record = activate();
            record.Read(i => row[i]);
            allRecords.Add(record);
        }
    }

    return allRecords;
}
/// <summary>
/// Parses every CSV record using ServiceStack.Text's field parser, one physical
/// line at a time.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var reader = new StreamReader(stream))
    {
        for (var line = reader.ReadLine(); line != null; line = reader.ReadLine())
        {
            var item = factory();
            var fields = ServiceStack.Text.CsvReader.ParseFields(line);
            // Empty fields are returned as null by this library. Convert that to
            // empty string to be more consistent with other libraries.
            item.Read(i => fields[i] ?? string.Empty);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record from a reader that exposes each row only as an
/// IEnumerable&lt;string&gt; (no random access to fields).
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var activate = ActivatorFactory.Create<T>(_activationMethod);
    var allRecords = new List<T>();

    using (var reader = new StreamReader(stream))
    {
        var csvReader = new CsvReader(reader);
        IEnumerable<string> fields;
        while ((fields = csvReader.ReadNextRow()) != null)
        {
            var record = activate();
            // The row is only enumerable, so the field index `i` is ignored:
            // each callback just advances the enumerator one step.
            // NOTE(review): this assumes Read() requests fields exactly once
            // each, in ascending order — confirm against the ICsvReadable
            // implementations before touching this.
            var enu = fields.GetEnumerator();
            record.Read(i =>
            {
                enu.MoveNext();
                return (enu.Current);
            });
            allRecords.Add(record);
        }
    }

    return (allRecords);
}
/// <summary>
/// Parses every CSV record using a reader that exposes the current row's cells
/// through CurrentRow.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var textReader = new StreamReader(stream))
    {
        var csv = new CsvReader(textReader, new CsvObjectOptions());
        while (csv.Read())
        {
            var item = factory();
            // Empty fields are returned as null by this library. Convert that to
            // empty string to be more consistent with other libraries.
            item.Read(i => csv.CurrentRow[i].Value ?? string.Empty);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record using TinyCsvParser's RFC 4180 tokenizer, one
/// physical line at a time.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var reader = new StreamReader(stream))
    {
        // Quote char, escape char, and delimiter, in that order.
        var tokenizer = new global::TinyCsvParser.Tokenizer.RFC4180.RFC4180Tokenizer(
            new global::TinyCsvParser.Tokenizer.RFC4180.Options('"', '"', ','));
        for (var line = reader.ReadLine(); line != null; line = reader.ReadLine())
        {
            var item = factory();
            var fields = tokenizer.Tokenize(line);
            item.Read(i => fields[i]);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record using CsvHelper's low-level CsvParser.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var reader = new StreamReader(stream))
    {
        var config = new global::CsvHelper.Configuration.CsvConfiguration(CultureInfo.InvariantCulture)
        {
            // Reuse identical field strings to cut allocations.
            CacheFields = true,
        };
        var parser = new global::CsvHelper.CsvParser(reader, config);
        while (parser.Read())
        {
            var item = factory();
            item.Read(i => parser[i]);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record via the span-based ProcessStream/BuildReader helpers.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var activate = ActivatorFactory.Create<T>(_activationMethod);
    var reader = BuildReader(activate);
    string[] fields = null;
    // closure over fields only allocated once
    Func<int, string> getFields = i => fields[i];
    var result = ProcessStream<T>(stream, spanLine =>
    {
        // Parse overwrites `fields`; getFields reads it through the shared
        // closure, so one delegate instance serves every row.
        fields = reader.Parse(spanLine);
        var record = activate();
        record.Read(getFields);
        return (record);
    });
    return (result);
}
/// <summary>
/// Parses every CSV record using LINQtoCSV's CsvContext with generic DataRow rows.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    using (var reader = new StreamReader(stream))
    {
        var description = new CsvFileDescription
        {
            FirstLineHasColumnNames = false,
        };
        foreach (var row in new CsvContext().Read<DataRow>(reader, description))
        {
            var item = factory();
            // Null cell values become empty strings for consistency with the other adapters.
            item.Read(i => row[i].Value ?? string.Empty);
            records.Add(item);
        }
    }

    return records;
}
/// <summary>
/// Parses every CSV record using the CSVFile package's CSVReader.
/// </summary>
/// <param name="stream">In-memory CSV data with no header row.</param>
/// <returns>All parsed records.</returns>
public List<T> GetRecords<T>(MemoryStream stream) where T : ICsvReadable, new()
{
    var factory = ActivatorFactory.Create<T>(_activationMethod);
    var records = new List<T>();

    var settings = new global::CSVFile.CSVSettings
    {
        HeaderRowIncluded = false,
    };

    using (var reader = new StreamReader(stream))
    using (var csv = new global::CSVFile.CSVReader(reader, settings))
    {
        foreach (var row in csv)
        {
            var item = factory();
            item.Read(i => row[i]);
            records.Add(item);
        }
    }

    return records;
}