/// <summary>Loads every Document record from the CSV file at <c>path</c>.</summary>
/// <returns>The fully materialized list of documents.</returns>
private IEnumerable<Document> ReadCsv()
{
    using (var streamReader = new StreamReader(path))
    using (var csvReader = new CsvHelper.CsvReader(streamReader, CultureInfo.InvariantCulture))
    {
        // Materialize while the underlying stream is still open.
        var documents = csvReader.GetRecords<Document>().ToList();
        return documents;
    }
}
/// <summary>Reads every record of type <typeparamref name="T"/> from the configured CSV path.</summary>
/// <returns>The materialized records.</returns>
public IEnumerable<T> GetRecords<T>()
{
    // Materialize before returning: GetRecords() is lazily evaluated, so the
    // original returned a deferred sequence backed by a reader that was never
    // disposed — enumeration happened against a leaked reader. The using-block
    // now releases the reader once the list is built.
    using (CsvHelper.CsvReader csvReader = OpenCsvReader<T>(_csvPath))
    {
        return csvReader.GetRecords<T>().ToList();
    }
}
/// <summary>
/// Reads all records of type <typeparamref name="T"/> from <paramref name="file"/>,
/// applying the default configuration and the class map <typeparamref name="M"/>.
/// </summary>
public List<T> Read<T, M>(TextReader file) where T : class where M : class
{
    var csvReader = new CsvHelper.CsvReader(file);
    DefaultConfiguration(csvReader);
    csvReader.Configuration.RegisterClassMap(typeof(M));
    var rows = csvReader.GetRecords<T>();
    return rows.ToList();
}
/// <summary>Populates the in-memory hotel store from a comma-delimited CSV file.</summary>
/// <param name="fileName">Path of the CSV file to load.</param>
public FileInitializedInMemoryDataStore(string fileName)
{
    using (var fileReader = new StreamReader(fileName))
    {
        var csv = new CsvHelper.CsvReader(fileReader);
        var config = csv.Configuration;
        config.Delimiter = ",";
        config.RegisterClassMap<HotelInfoMap>();
        hotels = csv.GetRecords<HotelInfo>().ToList();
    }
}
/// <summary>
/// Reads all records of type <typeparamref name="T"/>, silently skipping the
/// bad-data callback so malformed fields do not throw.
/// </summary>
public List<T> Read<T>(TextReader file) where T : class
{
    // Null BadDataFound disables the bad-data notification.
    var configuration = new Configuration { BadDataFound = null };
    var csvReader = new CsvHelper.CsvReader(file, configuration);
    DefaultConfiguration(csvReader);
    var records = csvReader.GetRecords<T>();
    return records.ToList();
}
/// <summary>Reads the address-book CSV and prints each contact's full name.</summary>
public static void ReadAddressBookCsv()
{
    string path = @"C:\Users\prajv\source\repos\AddressBookDay13\AddressBook.csv";
    // using-blocks dispose the reader/csv (the original leaked both), and we
    // iterate the records actually read from the file — the original read the
    // CSV into an unused local and then printed the unrelated static
    // AddressBook.Records collection instead.
    using (var reader = new StreamReader(path))
    using (var csv = new CsvHelper.CsvReader(reader, CultureInfo.InvariantCulture))
    {
        var records = csv.GetRecords<AddressBook>().ToList();
        foreach (AddressBook contact in records)
        {
            Console.WriteLine("FullName : " + contact.firstName + " " + contact.lastName);
        }
    }
}
/// <summary>Reads every record of type <typeparamref name="T"/> from the given file.</summary>
public IEnumerable<T> LoadFile<T>(string inputFile)
{
    using (var reader = File.OpenText(inputFile))
    {
        var csv = new CsvHelper.CsvReader(reader);
        // ToList() materializes before the reader is disposed.
        return csv.GetRecords<T>().ToList();
    }
}
/// <summary>
/// Loads transactions from a CSV file (sorted by date, then amount descending)
/// and refreshes the derived transaction/balance state.
/// </summary>
public void Import(string fileName)
{
    // Best-effort import: silently skip missing files.
    if (!File.Exists(fileName))
    {
        return;
    }
    using (var stream = new StreamReader(fileName))
    {
        var reader = new CsvHelper.CsvReader(stream);
        var records = reader.GetRecords<ImportRecord>();
        _transactions = records
            .OrderBy(x => x.Date)
            .ThenByDescending(x => x.Amount)
            .ToList();
    }
    UpdateTransactions();
    UpdateAccountBalances();
}
/// <summary>
/// Reads all records of type <typeparamref name="T"/> from a CSV file under
/// <c>basePath</c>, matching headers case-insensitively.
/// </summary>
/// <param name="filename">File name relative to <c>basePath</c>.</param>
public List<T> Read<T>(string filename)
{
    var path = basePath + "/" + filename;
    using (var reader = new StreamReader(path))
    using (var csv = new _CsvHelper.CsvReader(reader))
    {
        // Lower-case headers so matching is case-insensitive.
        csv.Configuration.PrepareHeaderForMatch = (string header, int index) => header.ToLower();
        csv.Configuration.Delimiter = ",";
        // GetRecords().ToList() already materializes; the original copied the
        // list a second time with a redundant records.ToList().
        return csv.GetRecords<T>().ToList();
    }
}
/// <summary>
/// Benchmark body: reads up to TakeRows rows via CsvHelper and folds each
/// row's CreationDate into a running sum so the work cannot be optimized away.
/// </summary>
public void CsvHelper()
{
    long poorHash = 0;
    using (var fs = File.OpenRead(Path))
    using (var reader = new StreamReader(fs))
    using (var csv = new CSH.CsvReader(reader))
    {
        var rows = csv.GetRecords<Row>().Take(TakeRows);
        foreach (var row in rows)
        {
            poorHash += row.CreationDate;
        }
    }
    //System.Diagnostics.Debug.WriteLine("CSVHelper: " + poorHash);
}
/// <summary>
/// Loads COE student records from a CSV file into _StudentRecords.
/// </summary>
/// <param name="path">Path of the student CSV file.</param>
/// <returns>Always true once the file has been read (unchanged contract).</returns>
private bool LoadStudentFile(string path)
{
    // using-blocks release the file handle and readers — the original created
    // the FileStream/StreamReader/CsvReader without ever disposing them.
    using (var fs = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read))
    using (var sr = new StreamReader(fs))
    using (var csv = new CsvHelper.CsvReader(sr))
    {
        // Tolerate missing fields and malformed rows rather than throwing.
        csv.Configuration.MissingFieldFound = null;
        csv.Configuration.BadDataFound = null;
        _StudentRecords = csv.GetRecords<COEStudentRecord>().ToList();
    }
    Debug.WriteLine(_StudentRecords.Count);
    return true;
}
/// <summary>
/// Reads the contract list CSV for the previous business day's folder and derives
/// each contract's SeriesKey from the last token of its instrument name.
/// </summary>
/// <param name="folderDate">Unused by the current implementation (kept for interface compatibility).</param>
public static IEnumerable<ContractVolume> GetContractVolumes(DateTime folderDate)
{
    ContractVolumeFolder = DirectoryNames.GetDirectoryName("ttapiContractVolume")
        + DirectoryNames.GetDirectoryExtension(BusinessDays.GetBusinessDayShifted(-1));
    // Dispose the readers — the original leaked both the StreamReader and CsvReader.
    using (var sr = new StreamReader(ContractVolumeFolder + "/ContractList.csv"))
    using (var reader = new CsvHelper.CsvReader(sr))
    {
        reader.Configuration.RegisterClassMap(new MyClassMap());
        ContractVolumeList = reader.GetRecords<ContractVolume>().ToList();
    }
    // SeriesKey = last whitespace-separated token of the instrument name.
    // Count/Length properties replace the original's repeated Count() extension calls.
    for (int i = 0; i < ContractVolumeList.Count; i++)
    {
        string[] words = ContractVolumeList[i].instrumentName.Split();
        ContractVolumeList[i].SeriesKey = words[words.Length - 1];
    }
    return ContractVolumeList;
}
/// <summary>
/// Parses CSV text of localised lines and registers them as a string table,
/// keyed by each record's line code.
/// </summary>
/// <param name="text">Raw CSV content.</param>
public void AddStringTable(string text)
{
    var stringTable = new Dictionary<string, string>();
    using (var reader = new System.IO.StringReader(text))
    using (var csv = new CsvHelper.CsvReader(reader))
    {
        foreach (var record in csv.GetRecords<Yarn.LocalisedLine>())
        {
            stringTable[record.LineCode] = record.LineText;
        }
    }
    AddStringTable(stringTable);
}
/// <summary>
/// Parses CSV text into records of the runtime type named
/// <paramref name="typeName"/>, resolved by scanning all loaded assemblies.
/// </summary>
/// <exception cref="ArgumentNullException">typeName is null or empty.</exception>
/// <exception cref="ArgumentException">No loaded type has that name.</exception>
public Task<object> Evaluate(string typeName, string str)
{
    if (string.IsNullOrEmpty(typeName))
    {
        throw new ArgumentNullException($"{nameof(typeName)}");
    }
    var type = AppDomain.CurrentDomain.GetAssemblies()
        .Select(assembly => assembly.GetTypes().FirstOrDefault(t => t.Name == typeName))
        .FirstOrDefault(t => t != null);
    if (type == null)
    {
        throw new ArgumentException($"Type not found: {typeName}");
    }
    using var reader = new StringReader(str);
    using var csv = new CsvHelper.CsvReader(reader, csvConfig);
    var records = csv.GetRecords(type).ToList();
    return Task.FromResult<object>(records);
}
/// <summary>
/// Reads EmployeeDetails records (header row expected) from the injected
/// text reader; failures are logged and rethrown.
/// </summary>
public IEnumerable<EmployeeDetails> Read()
{
    List<EmployeeDetails> employees = new List<EmployeeDetails>();
    try
    {
        using (var csv = new CsvHelper.CsvReader(_textReader))
        {
            csv.Configuration.HasHeaderRecord = true;
            employees = csv.GetRecords<EmployeeDetails>().ToList();
        }
    }
    catch (Exception ex)
    {
        // Log here, but let callers decide how to handle read failures.
        _logger.Error(ex);
        throw;
    }
    return employees;
}
/// <summary>
/// Queries the reporting endpoint with the specified filters and interpolated classes.
/// When <paramref name="betaEndPoint"/> is true, the beta JSON endpoint is tried first;
/// if that attempt throws (or beta was not requested), the method falls back to the
/// default CSV endpoint, parsed with the class map <typeparamref name="U"/>.
/// </summary>
/// <typeparam name="T">Result record type.</typeparam>
/// <typeparam name="U">CsvHelper class map used for the CSV fallback parse.</typeparam>
/// <param name="reportingFilters">Query filters; FormattedOutput and BetaEndPoint are mutated here.</param>
/// <param name="betaEndPoint">True to attempt the beta JSON endpoint before the CSV fallback.</param>
/// <returns>The records retrieved from whichever endpoint succeeded.</returns>
private ICollection <T> QueryBetaOrCSVMap <T, U>(QueryFilter reportingFilters, bool betaEndPoint = false) where T : JSONResult where U : CSVConfig.ClassMap
{
    var successOrWillTry = false;
    var results = new List <T>();
    if (betaEndPoint)
    {
        // Switch to JSON Output
        try
        {
            reportingFilters.FormattedOutput = ReportUsageFormatEnum.JSON;
            var activityresults = ResponseReader.RetrieveData <T>(reportingFilters);
            results.AddRange(activityresults);
            successOrWillTry = true;
        }
        catch (Exception ex)
        {
            // JSON failure is non-fatal: log and fall through to the CSV path below.
            Logger.LogError(ex, $"Failed for JSON Format with message {ex.Message}");
        }
    }
    if (!successOrWillTry)
    {
        // Switch to CSV Output
        reportingFilters.FormattedOutput = ReportUsageFormatEnum.Default;
        reportingFilters.BetaEndPoint = false;
        var CSVConfig = new CSVConfig.Configuration() { Delimiter = ",", HasHeaderRecord = true };
        CSVConfig.RegisterClassMap <U>();
        // NOTE(review): this CsvReader is not disposed; the stream's lifetime is
        // presumably owned by ResponseReader — confirm.
        var resultscsv = new CSV.CsvReader(ResponseReader.RetrieveDataAsStream(reportingFilters), CSVConfig);
        results.AddRange(resultscsv.GetRecords <T>());
    }
    Logger.LogInformation($"Found {results.Count} while querying successOrWillTry:{successOrWillTry}");
    return results;
}
/// <summary>
/// Reads Movie records from the CSV file at <paramref name="location"/> using
/// the Moviesmap class map and the current culture.
/// </summary>
/// <exception cref="Exception">Wraps any failure, preserving the original as InnerException.</exception>
public List<Movie> ReadCSVFile(string location)
{
    try
    {
        using (var reader = new StreamReader(location, Encoding.Default))
        using (var csv = new CsvHelper.CsvReader(reader, System.Globalization.CultureInfo.CurrentCulture))
        {
            csv.Configuration.RegisterClassMap<Moviesmap>();
            var records = csv.GetRecords<Movie>().ToList();
            return records;
        }
    }
    catch (Exception e)
    {
        // Keep the original exception as InnerException — the previous
        // `throw new Exception(e.Message)` discarded the original type and
        // stack trace, making failures undiagnosable.
        throw new Exception(e.Message, e);
    }
}
/// <summary>
/// This will return all the movies with the related ID and only show 1 per country
/// </summary>
/// <param name="id">Id of movie</param>
/// <returns>Metadata array of all the movies found</returns>
public Metadata[] FilterMovies(int id)
{
    //Note: This reads the CSV file. The StreamReader is now inside a using so it
    //is disposed even if the CsvReader constructor throws (it previously leaked).
    using (var reader = new StreamReader(Environment.CurrentDirectory + "/Docs/metadata.csv"))
    using (var csv = new CsvHelper.CsvReader(reader, CultureInfo.InvariantCulture))
    {
        //Note: "i.Duration.Length == 8" keeps only valid HH:MM:SS durations per spec.
        //The redundant identity .Select(i => i) from the original has been dropped.
        var reslts = csv.GetRecords<Metadata>()
            .OrderBy(i => i.MovieId)
            .Where(i => i.MovieId == id && i.Duration.Length == 8)
            //Note: comparer ensures only one entry per country survives.
            .Distinct(new DistinctItemComparer())
            .OrderBy(o => o.Language)
            .ToArray();
        return reslts;
    }
}
// Loads a fixed 6-axis sensor CSV, flattens every record's sensor values into a
// single double array, and reshapes it with NumSharp into an (N, 43) matrix
// (43 values per sample row). The trailing loop prints a few rows for debugging.
// NOTE(review): the while-loop increments i BEFORE indexing dataMat[i], so row 0
// is never printed — presumably unintended; confirm before relying on the output.
private void Button2_Click(object sender, EventArgs e)
{
    var filename = @"D:\temp\action_windows-6axis\action_data_1.csv";
    StreamReader SRFile = new StreamReader(filename);
    var csv = new CsvHelper.CsvReader(SRFile, CultureInfo.InvariantCulture);
    var records = csv.GetRecords <SensorData>().ToList();
    SRFile.Close(); // close the file
    List <double> df = new List <double>();
    foreach (var data in records)
    {
        df.AddRange(data.getSensorDataList());
    }
    double[] df1 = df.ToArray();
    //Console.WriteLine(df1.Length);
    var df2 = np.array(df1);
    Shape newshape = ((df1.Length / 43), 43);
    //Console.WriteLine((string)dad);
    var dataMat = df2.reshape(newshape);
    Console.WriteLine(dataMat.Shape);
    //Console.WriteLine((string)dataMat[":,-1"].reshape(-1,1));
    int i = 0;
    while (i < dataMat.size)
    {
        i++;
        Console.WriteLine((string)dataMat[i]);
        // Only the first few rows are printed.
        if (i > 5)
        {
            break;
        }
    }
}
/// <summary>
/// Downloads the gzipped Landsat-8 scene list from S3, decompresses it on the
/// fly, and parses every row into Landsat8CsvInfo records.
/// </summary>
public static Landsat8CsvInfo[] GetLastLandsat8()
{
    // Path/row codes of interest — currently referenced only by the
    // commented-out filter at the bottom.
    string[] codes = { "132023", "132024", "137022", "137023", "136022", "136023", "134023", "134024", "137021", "137022", "137023", "136021", "136022", "136023", "134021", "134022", "134023", "136021", "136022", "137020", "137021", "137022", "139019", "139020", "139021", "137022", "137023", "139021", "139022" };
    using (var client = new WebClient())
    using (var netStream = client.OpenRead("https://landsat-pds.s3.amazonaws.com/c1/L8/scene_list.gz"))
    using (var gzipStream = new System.IO.Compression.GZipStream(netStream, System.IO.Compression.CompressionMode.Decompress))
    using (var reader = new System.IO.StreamReader(gzipStream))
    using (var csvStream = new CsvHelper.CsvReader(reader, System.Globalization.CultureInfo.InvariantCulture))
    {
        return csvStream.GetRecords<Landsat8CsvInfo>().ToArray();
        //return csvStream.GetRecords<Landsat8CsvInfo>().Where(c => c.cloudCover < 50 && c.acquisitionDate > DateTime.Now.AddDays(-120) && codes.Contains(c.path.ToString("D3") + c.row.ToString("D3")));
    }
}
// Deduplicates a headerless sites CSV by its first-column key (case-insensitive,
// first occurrence wins), restores the original record order, and writes the
// surviving raw lines to a new file.
static void Main(string[] args)
{
    //assuming the unique key is in the first column
    var config = new CsvHelper.Configuration.Configuration
    {
        HasHeaderRecord = false,
        IgnoreBlankLines = true,
        TrimOptions = TrimOptions.Trim,
        IgnoreQuotes = true
    };
    config.RegisterClassMap<RecordDataLayoutMap>();
    using (var fileReader = System.IO.File.OpenText(@"C:\temp\WaDEImportFiles\sites.csv"))
    using (var csvReader = new CsvHelper.CsvReader(fileReader, config))
    {
        var allRecords = csvReader.GetRecords<RecordData>();
        var dedupedLines = allRecords
            .GroupBy(a => a.Key.ToLower())
            .Select(a => a.First())
            .OrderBy(a => a.RecordNumber)
            .Select(a => a.Record);
        System.IO.File.WriteAllLines(@"C:\temp\WaDEImportFiles\sites_deduped.csv", dedupedLines);
    }
}
/// <summary> /// UC14- Reading the stored contacts to the address book file /// </summary> /// <param name="addressBook"></param> public static void ReadCSVFile(AddressBook addressBook) { try { string csvFilePath = @$ "F:\Program files(x64)\Microsoft Visual Studio\BridgeLabzAssignments\AddressBookProblem-FileIOCSV\{addressBook.nameOfAddressBook}AddressBookCSV.csv"; /// Create a new object of the StreamReader class and initialise the file path var reader = new StreamReader(csvFilePath); /// Creates a new CSV reader instance var csv = new CsvHelper.CsvReader(reader, CultureInfo.InvariantCulture); csv.Configuration.Delimiter = ","; /// Store the records in the records to be invoked by GetRecords method var records = csv.GetRecords <ContactDetails>().ToList(); /// Iterating over the records to display the contacts foreach (ContactDetails contact in records) { Console.WriteLine("\nFullName: " + contact.firstName + " " + contact.secondName + "\nAddress: " + contact.address + "\nCity: " + contact.city + "\nState: " + contact.state + "\nZip: " + contact.zip + "\nPhoneNumber: " + contact.phoneNumber + "\nEmail: " + contact.emailId + "\n"); } /// Close the stream reader so as to avoid any conflict with reopen reader.Close(); }
/// <summary>
/// Reads all records of <paramref name="type"/> from the CSV at
/// <paramref name="path"/>, using the encoding and header setting resolved for
/// that type, and wraps any failure in CsvIOException.
/// </summary>
public static IEnumerable<object> Read(Type type, string path)
{
    try
    {
        using var reader = new StreamReader(path, EncodesResolver.Resolve(type));
        using var csvReader = new CsvHelper.CsvReader(reader, CultureInfo.InvariantCulture);
        csvReader.Configuration.HasHeaderRecord = CsvHasHeaderResolver.Resolve(type);
        var records = csvReader.GetRecords(type);
        return records.ToList();
    }
    catch (Exception ex) when (ex is ValidationException || ex is BadDataException)
    {
        throw new CsvIOException("CSVファイルの書式が不正です", ex);
    }
    catch (IOException ex)
    {
        throw new CsvIOException("CSVファイルを読み込めません", ex);
    }
    catch (Exception ex)
    {
        throw new CsvIOException("CSVファイル読み込み時に不明なエラーが発生しました", ex);
    }
}
/// <summary>
/// Reads a CSV file into <typeparamref name="TModel"/> records using the class
/// map <typeparamref name="TMapping"/> and the given separator/encoding/culture.
/// </summary>
/// <param name="firstLineHasColumn">True when the first line is a header row.</param>
public static IEnumerable<TModel> Read<TModel, TMapping>(string path, string separator, Encoding encoding, string culture, bool firstLineHasColumn = true) where TModel : class, new() where TMapping : ClassMap, new()
{
    var inputFileDescription = new CsvConfiguration(new CultureInfo(culture))
    {
        Delimiter = separator,
        HasHeaderRecord = firstLineHasColumn,
        Encoding = encoding,
        IgnoreBlankLines = true
    };
    using (var fs = new StreamReader(path))
    using (var csv = new CsvHelper.CsvReader(fs, inputFileDescription))
    {
        csv.Context.RegisterClassMap<TMapping>();
        return csv.GetRecords<TModel>().ToList();
    }
}
private Task <IEnumerable <CaseStatus> > ReadInternal(string filePath) { var result = new List <CaseStatus>(); CultureInfo provider = CultureInfo.InvariantCulture; using (var reader = new StreamReader(filePath)) using (var csv = new CsvHelper.CsvReader(reader, CultureInfo.InvariantCulture)) { var dataRead = csv.GetRecords <dynamic>().ToList(); foreach (var item in dataRead) { if (string.IsNullOrWhiteSpace(item.Date)) { continue; } var valueDictionary = new RouteValueDictionary(item); result.AddRange(Enum.GetNames(typeof(District)).Select(x => new CaseStatus { District = (District)Enum.Parse(typeof(District), x), Date = System.DateTime.ParseExact(item.Date, "dd-MM-yyyy", provider), Count = int.TryParse((string)valueDictionary[x], out var value) ? value : 0 }));
/// <summary>
/// Loads assessment rows from a CSV file, drops rows whose rubric cell score is
/// the literal "NULL", and collects the distinct student ids.
/// </summary>
/// <param name="path">Path of the assessment CSV file.</param>
/// <returns>Always true once the file has been read (unchanged contract).</returns>
private bool LoadAssessmentFile(string path)
{
    // using-blocks release the file handle and readers — the original created
    // the FileStream/StreamReader/CsvReader without ever disposing them.
    using (var fs = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read))
    using (var sr = new StreamReader(fs))
    using (var csv = new CsvHelper.CsvReader(sr))
    {
        // Tolerate missing fields and malformed rows rather than throwing.
        csv.Configuration.MissingFieldFound = null;
        csv.Configuration.BadDataFound = null;
        _AssessmentRows = csv.GetRecords<AssessmentRowWithPosition>().ToList();
    }
    //Debug.WriteLine("Assessment Data loaded: " + _AssessmentRows.Count);
    // Correcting an error in the rubric data (to review with Lisa B). The
    // original called this RemoveAll twice; the second pass was a no-op and has
    // been removed.
    _AssessmentRows.RemoveAll(x => x.Rubric_Cell_Score == "NULL");
    StudentIds = _AssessmentRows.Select(x => x.STUDENT_ID).Distinct().ToList();
    return true;
}
/// <summary>
/// Reads records from a csv file, optionally reporting progress as it goes.
/// </summary>
/// <typeparam name="T">Record type to materialize.</typeparam>
/// <param name="filePath">Absolute or relative file path</param>
/// <param name="csvConfiguration">Csv configuration. If it is null, then the default options will be used.</param>
/// <param name="progressChanged">Callback to report progress</param>
/// <param name="operation">Progress report title text</param>
/// <param name="cntRecords">Number of read records</param>
/// <returns>The list of records read from the file.</returns>
public static IList<T> ReadFromFile<T>(string filePath, Configuration csvConfiguration, System.Action<string> progressChanged, string operation, out int cntRecords)
{
    string progressTitle = $"{operation} {nameof(ReadFromFile)} {filePath.TrimPath()}";
    progressChanged?.Invoke(progressTitle);
    var cache = new List<T>();
    using (var reader = new StreamReader(filePath))
    using (var csv = new CsvHelper.CsvReader(reader, csvConfiguration ?? CsvFile.DefaultConfiguration))
    {
        // Match headers ignoring embedded spaces.
        csv.Configuration.PrepareHeaderForMatch = (string header, int index) => header.Replace(" ", "");
        IEnumerable<T> records = csv.GetRecords<T>();
        if (progressChanged == null)
        {
            // No progress callback: just drain the sequence.
            cache.AddRange(records);
            cntRecords = cache.Count;
        }
        else
        {
            // Batch helper drives enumeration and reports progress per chunk.
            cntRecords = Batch.ProcessRecords(records, item => cache.Add(item), progressChanged, progressTitle);
            progressChanged($"{progressTitle} {cntRecords:N0}");
        }
    }
    return cache;
}
// Loads Country records from the given CSV file into Records.
// NOTE(review): the FileNotFoundException catch can never fire — a missing file
//   throws from the StreamReader constructor, which is outside the try block.
// NOTE(review): HasHeaderRecord is false, yet csv.Read() consumes the first row
//   before GetRecords, so the first line is discarded — confirm whether this is
//   an intentional manual header skip.
// NOTE(review): if either catch runs, Records stays null and the Records.Count
//   log line below throws NullReferenceException.
public CountryHelper(string fileLocation)
{
    // Using CsvHelper instead of my own implementation
    using (var reader = new StreamReader(fileLocation))
    using (var csv = new CsvHelper.CsvReader(reader, CultureInfo.InvariantCulture))
    {
        Console.WriteLine($"Reading countries from {fileLocation}...");
        try
        {
            csv.Configuration.HasHeaderRecord = false;
            csv.Read();
            Records = csv.GetRecords <Country>().ToList();
        }
        catch (FileNotFoundException e)
        {
            Console.WriteLine(e.Message);
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }
        Console.WriteLine($"Success! Found {Records.Count} countries in the file.");
    }
}
/// <summary>
/// import the mapings specified in the file into the given mappingSet.
/// Existing mappings are deleted first, then each CSV record is resolved to a
/// source node (required) and optionally a target node; matched pairs get a
/// mapping, source-only records get an empty mapping.
/// </summary>
/// <param name="mappingSet">the mappingset to import the mappings into</param>
/// <param name="filePath">the path to the file containing the mappings</param>
/// <param name="model">model used when building mapping logics from the CSV text</param>
public static void importMappings(MappingSet mappingSet, string filePath, Model model)
{
    IEnumerable <CSVMappingRecord> mappingRecords;
    //remove all existing mappings
    foreach (var mapping in mappingSet.mappings)
    {
        mapping.delete();
    }
    //map source and target to be sure
    mappingSet.source.mapTo(mappingSet.target);
    //make sure the target node tree has been build
    ((MappingNode)mappingSet.target).buildNodeTree();
    //read the csv file
    using (var textReader = new StreamReader(filePath))
    {
        var csv = new CSV.CsvReader(textReader, false);
        csv.Configuration.RegisterClassMap <CSVMappingRecordMap>();
        csv.Configuration.Delimiter = ";";
        mappingRecords = csv.GetRecords <CSVMappingRecord>();
        // Caches of already-resolved nodes, keyed by their CSV path, so repeated
        // paths don't trigger repeated tree searches.
        var sourceNodes = new Dictionary <string, MP.MappingNode>();
        var targetNodes = new Dictionary <string, MP.MappingNode>();
        //now loop the records
        foreach (var csvRecord in mappingRecords)
        {
            if (string.IsNullOrEmpty(csvRecord.sourcePath) || (string.IsNullOrEmpty(csvRecord.targetPath) && string.IsNullOrEmpty(csvRecord.mappingLogic)))
            {
                //don't even bother if not both fields are filled in
                continue;
            }
            //convert any newLines (\n") coming from excel (Alt-Enter) to "real" newlines
            csvRecord.mappingLogic = csvRecord.mappingLogic.Replace("\n", Environment.NewLine);
            //find the source
            //first check if we already known the node
            MP.MappingNode sourceNode = null;
            if (!string.IsNullOrEmpty(csvRecord.sourcePath) && !sourceNodes.TryGetValue(csvRecord.sourcePath, out sourceNode))
            {
                //find out if we know a parent node of this node
                var parentNode = findParentNode(sourceNodes, csvRecord.sourcePath);
                if (parentNode == null)
                {
                    //no parent found, start at the top
                    parentNode = mappingSet.source;
                }
                //find the node from the parent
                sourceNode = parentNode.findNode(csvRecord.sourcePath.Split('.').ToList());
            }
            if (sourceNode == null)
            {
                EAOutputLogger.log($"Could not find source element corresponding to '{csvRecord.sourcePath}'", 0, LogTypeEnum.warning);
                //don't bother going any further
                continue;
            }
            //find the target
            MP.MappingNode targetNode = null;
            //first check if we already known the node
            if (!string.IsNullOrEmpty(csvRecord.targetPath) && !targetNodes.TryGetValue(csvRecord.targetPath, out targetNode))
            {
                //find out if we know a parent node of this node
                var parentNode = findParentNode(targetNodes, csvRecord.targetPath);
                if (parentNode == null)
                {
                    //no parent found, start at the top
                    parentNode = mappingSet.target;
                }
                //find the node from the parent
                targetNode = parentNode.findNode(csvRecord.targetPath.Split('.').ToList());
                if (targetNode == null)
                {
                    EAOutputLogger.log($"Could not find target element corresponding to '{csvRecord.targetPath}'", 0, LogTypeEnum.warning);
                }
            }
            //if we found both then we map them
            if (sourceNode != null)
            {
                if (targetNode != null)
                {
                    var newMapping = sourceNode.mapTo(targetNode);
                    newMapping.mappingLogics = createMappingLogicsFromCSVString(csvRecord.mappingLogic, mappingSet.EAContexts, model);
                    newMapping.save();
                    EAOutputLogger.log($"Mapping created from '{csvRecord.sourcePath}' to '{csvRecord.targetPath}'", 0);
                }
                else
                {
                    // Source resolved but no target: record an empty mapping so
                    // the logic text is not lost.
                    var newMapping = sourceNode.createEmptyMapping(false);
                    newMapping.mappingLogics = createMappingLogicsFromCSVString(csvRecord.mappingLogic, mappingSet.EAContexts, model);
                    newMapping.save();
                    EAOutputLogger.log($"Empty mapping created for '{csvRecord.sourcePath}' ", 0);
                }
            }
        }
    }
}
// Imports an uploaded CSV of client approval-status updates:
//   1. rejects a null upload with a validation message on the Index view;
//   2. parses the file with CsvHelper using ApprovalStatusCsvMap (lenient,
//      case-insensitive, empty records skipped — legacy CsvHelper API);
//   3. records an Import row keyed by a fresh Guid;
//   4. builds a FundStatusId -> ApprovalStatusId lookup from the database;
//   5. stamps each record (row index, import id, approval status, audit fields)
//      and streams the records in 10,000-row chunks into the ImportClients
//      table via SqlBulkCopy, then redirects to the Preview action.
// NOTE(review): `fileName` is computed but unused, and the LINQ projection into
//   local `q` over the DataTable columns appears to be dead code — confirm.
public ActionResult Upload(HttpPostedFileWrapper file) { if (file == null) { var model = new ClientsIndexModel(); ViewBag.ImportApprovalStatusError = "Please select a file."; return(View("Index", model)); } var id = Guid.NewGuid(); string fileName = System.IO.Path.Combine(System.IO.Path.GetTempPath(), id.ToString()); var csvConf = new CsvHelper.Configuration.CsvConfiguration() { IsStrictMode = false, IsCaseSensitive = false, SkipEmptyRecords = true }; csvConf.ClassMapping <ApprovalStatusCsvMap>(); using (var csvReader = new CsvHelper.CsvReader(new System.IO.StreamReader(file.InputStream), csvConf)) { var updatedAt = DateTime.Now; var updatedBy = this.Permissions.User.Id; var csvChunkSize = 10000; var recordIndex = 1; Dictionary <int, int> fsas = new Dictionary <int, int>(); using (var db = new ccEntities()) { db.Imports.AddObject(new CC.Data.Import() { Id = id, StartedAt = DateTime.Now, UserId = this.Permissions.User.Id }); db.SaveChanges(); var q = (from fs in db.FundStatuses join a in db.ApprovalStatuses on fs.ApprovalStatusName equals a.Name select new { fsid = fs.Id, asid = a.Id } ); foreach (var intem in q) { fsas.Add(intem.fsid, intem.asid); } } foreach (var csvChunk in csvReader.GetRecords <ImportClient>().Split(csvChunkSize)) { string connectionString = System.Data.SqlClient.ConnectionStringHelper.GetProviderConnectionString(); using (var sqlBulk = new System.Data.SqlClient.SqlBulkCopy(connectionString, SqlBulkCopyOptions.KeepNulls)) { foreach (var record in csvChunk) { record.RowIndex = recordIndex++; record.ImportId = id; if (record.FundStatusId.HasValue && fsas.ContainsKey(record.FundStatusId.Value)) { record.ApprovalStatusId = fsas[record.FundStatusId.Value]; } record.UpdatedAt = updatedAt; record.UpdatedById = updatedBy; } var dataTable = csvChunk.ToDataTable(); var q = dataTable.Columns.OfType <System.Data.DataColumn>().Where(f => f.DataType == typeof(Int32)).Select(f => new { c = f.ColumnName, values = dataTable.Rows.OfType
<System.Data.DataRow>().Select((r, i) => r[f.ColumnName]) }); sqlBulk.DestinationTableName = "ImportClients"; sqlBulk.NotifyAfter = 1000; sqlBulk.ColumnMappings.Add("ClientId", "ClientId"); sqlBulk.ColumnMappings.Add("FundStatusId", "FundStatusId"); sqlBulk.ColumnMappings.Add("RowIndex", "RowIndex"); sqlBulk.ColumnMappings.Add("ImportId", "ImportId"); sqlBulk.ColumnMappings.Add("UpdatedAt", "UpdatedAt"); sqlBulk.ColumnMappings.Add("UpdatedById", "UpdatedById"); sqlBulk.SqlRowsCopied += (s, e) => { System.Diagnostics.Debug.Write(e.RowsCopied); }; sqlBulk.WriteToServer(dataTable); } } } return(RedirectToAction("Preview", new { id = id })); }
//public static Contexts db = new Contexts();

// Reads a CSV file (MSR2019/<FileName>) as dynamic records. For every row a
// Project is built: well-known columns (id, Summary, Depends.on, Duplicates,
// Modified, Product, Version, Reported, Status, Blocks, Commit_ID, description,
// author, files, created_at) map onto Project fields; any other column whose
// value is not "NA" becomes a ProjectAdditionalDetails row. Everything is saved
// through a DbContext resolved from the service provider.
// NOTE(review): date columns are truncated to 16 chars and suffixed ":00" before
// DateTime.Parse — assumes a "yyyy-MM-dd HH:mm"-style prefix; confirm format.
public static void ReadFromFile(string FileName, string ProjectId, IServiceProvider serviceProvider)
{
    List <string> fileNames = new List <string> { FileName };
    foreach (string _fileName in fileNames)
    {
        using (var reader = new StreamReader(@"MSR2019/" + _fileName))
        using (var csv = new CsvHelper.CsvReader(reader))
        {
            var _d = csv.GetRecords <dynamic>();
            using (var context = new Contexts(serviceProvider.GetRequiredService < DbContextOptions <Contexts> >()))
            {
                foreach (var _s in _d)
                {
                    Project _p = new Project();
                    _p.ProjectAdditionalDetails = new List <ProjectAdditionalDetails>();
                    _p.GId = Guid.NewGuid();
                    _p.Project_Id = Guid.Parse(ProjectId);
                    foreach (var _ss in _s)
                    {
                        //id Summary Depends.on Duplicates Modified Product Version Reported Status Blocks Commit_ID description author files created_at
                        if (_ss.Key == "id")
                        {
                            if (_ss.Value.Length > 0)
                            {
                                _p.Id = int.Parse(_ss.Value);
                            }
                        }
                        else if (_ss.Key == "Summary") { _p.Summary = _ss.Value; }
                        else if (_ss.Key == "Depends.on") { _p.DependsOn = _ss.Value; }
                        else if (_ss.Key == "Duplicates") { _p.Duplicates = _ss.Value; }
                        else if (_ss.Key == "Modified")
                        {
                            if (_ss.Value.Length > 5)
                            {
                                // Truncate to minutes and append seconds before parsing.
                                string _dateS = _ss.Value.Substring(0, 16) + ":00";
                                _p.Modified = DateTime.Parse(_dateS);
                            }
                        }
                        else if (_ss.Key == "Product") { _p.Product = _ss.Value; }
                        else if (_ss.Key == "Version") { _p.Version = _ss.Value; }
                        else if (_ss.Key == "Reported")
                        {
                            if (_ss.Value.Length > 5)
                            {
                                string _dateR = _ss.Value.Substring(0, 16) + ":00";
                                _p.Reported = DateTime.Parse(_dateR);
                            }
                        }
                        else if (_ss.Key == "Status") { _p.Status = _ss.Value; }
                        else if (_ss.Key == "Blocks") { _p.Blocks = _ss.Value; }
                        else if (_ss.Key == "Commit_ID") { _p.Commit_ID = _ss.Value; }
                        else if (_ss.Key == "description") { _p.Description = _ss.Value; }
                        else if (_ss.Key == "author") { _p.Author = _ss.Value; }
                        else if (_ss.Key == "files") { _p.Files = _ss.Value; }
                        else if (_ss.Key == "created_at")
                        {
                            if (_ss.Value.Length > 5)
                            {
                                string _dateC = _ss.Value.Substring(0, 16) + ":00";
                                _p.CreatedAt = DateTime.Parse(_dateC);
                            }
                        }
                        else
                        {
                            // Unrecognized column: keep it as an additional-detail row
                            // unless the value is the "NA" placeholder.
                            if (_ss.Value != "NA")
                            {
                                ProjectAdditionalDetails _pad = new ProjectAdditionalDetails { GId = Guid.NewGuid(), Key = _ss.Key, Value = _ss.Value, Project_Id = _p.GId };
                                _pad.Project_Id = _p.GId;
                                _p.ProjectAdditionalDetails.Add(_pad);
                            }
                        }
                    }
                    context.Add(_p);
                    Console.WriteLine(_p);
                }
                context.SaveChanges();
            }
        }
    }
}