public async Task Given_DataFile_When_Reporting_Then_ProduceJsonReport()
{
    // Arrange
    var dataFile = Path.Combine(AppContext.BaseDirectory, "Data", "chicago-bulls.csv");
    var expectedReportFile = Path.Combine(AppContext.BaseDirectory, "Data", "chicago-bulls.json");

    // Round-trip the expected JSON (deserialize, then re-serialize indented)
    // so the comparison is insensitive to formatting differences in the file.
    var rawExpected = await File.ReadAllTextAsync(expectedReportFile);
    var expectedReport = JsonConvert.SerializeObject(JsonConvert.DeserializeObject(rawExpected), Formatting.Indented);

    var enrichers = new List<IReportEnricher>
    {
        new AveragePlayerHeightEnricher(new ImperialToMetricConverter()),
        new AveragePointsForTeamEnricher(),
        new TeamEnricher(),
        new TopAchieversEnricher()
    };
    var sut = new TeamStatisticsJsonReporter(
        new CsvDataProvider(new PlayerParser()),
        new TeamStatisticsReporter(enrichers));

    // Act
    var result = await sut.GetReportAsJsonAsync(dataFile);

    // Assert
    Assert.Equal(expectedReport, result);
}
public void provides_data()
{
    // Arrange: two single-cell rows, and a mapper stubbed to translate
    // each row into an entity with the matching id.
    var rows = new List<List<string>>
    {
        new List<string> { "1" },
        new List<string> { "2" }
    };
    var mapper = new Mock<ICsvEntityMapper>();
    mapper.Setup(m => m.MapToEntity(rows[0])).Returns(new MockEntity { Id = 1 });
    mapper.Setup(m => m.MapToEntity(rows[1])).Returns(new MockEntity { Id = 2 });

    // Act
    var result = CsvDataProvider<MockEntity>.ProvideData(rows, mapper.Object);

    // Assert: one entity per input row, mapped in order.
    Assert.Equal(result.Count, rows.Count);
    Assert.IsType<MockEntity>(result[0]);
    Assert.Equal(1, result[0].Id);
    Assert.Equal(2, result[1].Id);
}
/// <summary>
/// Per-test fixture setup: generates the CSV test file and builds the
/// iterator-based provider under test over it.
/// </summary>
public DataProviderTests()
{
    // Constructors cannot be async, so the generation must block. Use
    // GetAwaiter().GetResult() instead of .Result so a failure surfaces the
    // original exception rather than an AggregateException wrapper.
    _byteCount = _fileGenerator.GenerateAsync(Separator, RowCount).GetAwaiter().GetResult();

    var iterator = new CsvFileIterator(_fileGenerator.FilePath, Encoding.UTF8);
    _provider = new IteratorBasedDataProvider(iterator, Separator);
}
/// <summary>
/// Creates a sequential parser over the given data provider.
/// </summary>
/// <param name="dataProvider">Source of CSV data; forwarded to the base parser.</param>
/// <param name="lineProcessedCallback">
/// Stored for later use — presumably invoked with (line index, parsed model)
/// after each line is processed; confirm against the base class contract.
/// </param>
public SequentialParser(CsvDataProvider dataProvider, Action<int, TModel> lineProcessedCallback)
    : base(dataProvider)
    => _lineProcessedCallback = lineProcessedCallback;
public void SetUp()
{
    // Fresh Rhino Mocks stubs for every test so expectations never leak
    // between test cases.
    _csvParserMock = MockRepository.GenerateMock<IFileParser<List<string[]>>>();
    _validatorMock = MockRepository.GenerateMock<IParsingResultValidator>();
    _filePathProviderMock = MockRepository.GenerateMock<IDataFilePathProvider>();
    _validationOperationResultLoggerMock = MockRepository.GenerateMock<IValidationOperationResultLogger>();

    // System under test, wired with the mocked collaborators.
    _csvDataProvider = new CsvDataProvider(
        _csvParserMock,
        _validatorMock,
        _filePathProviderMock,
        _validationOperationResultLoggerMock);
}
private void OnEnable()
{
    // Cache the serialized-property handles once when the inspector is enabled.
    // The string names must match the serialized backing fields exactly —
    // including the existing "seperator" / "seconFieldContaines" misspellings
    // in the target type — so they are deliberately left as-is.
    csvFile_prop = serializedObject.FindProperty("_csvFile");
    seperator_prop = serializedObject.FindProperty("_seperator");
    rowBasedLayout_prop = serializedObject.FindProperty("_rowBasedLayout");
    seconFieldContainesDataType_prop = serializedObject.FindProperty("_seconFieldContainesDataType");

    // The inspected object itself, for direct access beyond serialized properties.
    _provider = (CsvDataProvider)serializedObject.targetObject;
}
private void PrepareInitialDataProvider()
{
    // Build a fully concrete (non-mocked) provider pipeline for the seed
    // data file: parse -> validate -> resolve path -> log validation results.
    var parser = new CsvFileParser();
    var validator = new ParsingResultValidator();
    var filePathProvider = new InitialDataFilePathProvider { FilePath = _initialDataFilePath };
    var resultLogger = new FileValidationOperationResultLogger(new FileOperations());

    _initialDataProvider = new CsvDataProvider(parser, validator, filePathProvider, resultLogger);
}
/// <summary>
/// Initialize the agent and the stock exchange from persisted parameters:
/// loads the agent's configuration and indicators, restores its replay
/// experiences, wires training notifications to Redis, and marks it Running.
/// </summary>
/// <param name="agentId">Database id of the agent to load; when null, nothing is initialized.</param>
protected void Initialize(long? agentId)
{
    using (var DbContext = new DeepQStockContext())
    {
        if (agentId.HasValue)
        {
            // Load the agent row together with its Q-network and exchange config.
            var agentParameters = DbContext.DeepRLAgentParameters
                .Include(a => a.QNetwork)
                .Include(a => a.StockExchange)
                .Single(a => a.Id == agentId.Value);

            // A previously completed agent is reset before being reused.
            if (agentParameters.Status == AgentStatus.Completed)
            {
                DbContext.ClearAgent(agentParameters);
                DbContext.SaveChanges();
            }

            Parameters = agentParameters.StockExchange;
            RewardCalculator = RewardCalculator.Use(RewardCalculatorType.WinningsOverLoosings);

            // Indicator sets per aggregation period.
            DailyIndicators = InitializeIndicators(DbContext, PeriodType.Day);
            WeeklyIndicators = InitializeIndicators(DbContext, PeriodType.Week);
            MonthlyIndicators = InitializeIndicators(DbContext, PeriodType.Month);

            Agent = new DeepRLAgent(agentParameters);
            // Relay epoch-complete events to subscribers via Redis pub/sub.
            Agent.OnTrainingEpochComplete += (e, args) => RedisManager.Publish(RedisPubSubChannels.OnTrainingEpochComplete, JsonConvert.SerializeObject(args));

            // Restore the replay memory persisted for this agent.
            var experiences = DbContext.Experiences.Where(e => e.AgentId == agentParameters.Id).ToList();
            Agent.SetExperiences(experiences);

            DataProvider = new CsvDataProvider(Parameters.CsvDataFilePath, Parameters.EpisodeLength);

            // Resume a paused run from the day after its last persisted state.
            if (agentParameters.Status == AgentStatus.Paused)
            {
                CurrentState = DbContext.States.Include(s => s.InternalPeriods).Single(s => s.StockExchangeId == Parameters.Id);
                DataProvider.Seek(CurrentState.Today.Date.AddDays(1));
            }

            // NOTE(review): when the first Completed branch ran above, nothing has
            // changed agentParameters.Status since, so this clears the agent a
            // second time — looks redundant; confirm ClearAgent is idempotent
            // before removing either call.
            if (agentParameters.Status == AgentStatus.Completed)
            {
                DbContext.ClearAgent(agentParameters);
            }

            agentParameters.Status = AgentStatus.Running;
            DbContext.SaveChanges();
        }
    }
}
/// <summary>
/// Starts the asynchronous load of the Australian sites CSV. If the data is
/// already populated, signals completion immediately instead of reloading.
/// Completion is reported through <c>OnLoadAustralianSitesCompleted</c>.
/// </summary>
public void LoadAustralianSites()
{
    // Already loaded — short-circuit and notify listeners right away.
    if (this.AustralianSites != null && this.AustralianSites.DataSource.Count > 0)
    {
        OnLoadDataCompleted(AustralianSitesKey);
        return;
    }

    IsLoadingData = true;
    LoadingTimer.StartTask("LoadAustralianSites");

    // (Removed a block of commented-out AirlinesDataProvider code that was
    // superseded by the CSV provider below.)
    var csvDataProvider = new CsvDataProvider();
    csvDataProvider.GetDataCompleted += OnLoadAustralianSitesCompleted;
    csvDataProvider.GetDataAsync("australian_sites.csv");
}
public void ReadSimpleEmptyCsvFile()
{
    // Arrange: a provider configured with a single, empty sample file.
    var configuration = new CsvDataProviderConfiguration
    {
        CsvFilePaths = new List<string> { "./CsvFiles/SimpleEmpty.csv" }
    };
    var provider = new CsvDataProvider(configuration);

    // Act
    var entityDataList = provider.GetEntityDataList();

    // Assert: a non-null list whose first entry matches the empty fixture.
    Assert.IsNotNull(entityDataList);
    MyAssert.AssertSimpleEmpty(entityDataList[0]);
}
/// <summary>
/// Entry point: loads ingredients, orders and dishes from CSV resources,
/// wires up the restaurant services, processes every order and prints the
/// ingredient/dish stock before and after.
/// </summary>
public static async Task Main(string[] args)
{
    string projectRootDir = DirectoryHelper.GetProjectRootDir();

    // Build the resource path segment by segment so it resolves on any OS
    // (the original hard-coded Windows '\' separators in a verbatim string).
    string directoryCsvResource = Path.Combine(projectRootDir, "data", "csv", "resource");

    const string fileCsvIngredient = "ingredient.csv";
    const string fileCsvOrder = "order.csv";
    const string fileCsvDish = "dish.csv";

    // Load and map each CSV resource into its domain model.
    var ingredientCsvData = await CsvFileReader.ReadData(directoryCsvResource, fileCsvIngredient);
    var ingredients = CsvDataProvider<Ingredient>.ProvideData(ingredientCsvData, new IngredientMapper());

    var orderCsvData = await CsvFileReader.ReadData(directoryCsvResource, fileCsvOrder);
    var orders = CsvDataProvider<Order>.ProvideData(orderCsvData, new OrderMapper());

    var dishCsvData = await CsvFileReader.ReadData(directoryCsvResource, fileCsvDish);
    var dishes = CsvDataProvider<Dish>.ProvideData(dishCsvData, new DishMapper());

    var ingredientRepo = new IngredientRepository(ingredients);
    var dishRepo = new DishRepository(dishes);

    IRestaurant restaurant = new Restaurant(
        new DishService(dishRepo, ingredientRepo),
        new OrderService(ingredientRepo, dishRepo),
        new IngredientService(ingredientRepo)
    );

    Console.WriteLine("Before:");
    ingredientRepo.GetAll().ForEach(Console.WriteLine);
    dishRepo.GetAll().ForEach(Console.WriteLine);
    Console.WriteLine();

    // Process every order and report the dishes prepared for it.
    orders.ForEach(order =>
    {
        var (orderId, preparedDishes) = restaurant.ProcessOrder(order);
        Console.WriteLine($"Order ID: {orderId}");
        preparedDishes.ToList().ForEach(Console.WriteLine);
    });

    Console.WriteLine();
    Console.WriteLine("After:");
    ingredientRepo.GetAll().ForEach(Console.WriteLine);
}
public async Task Given_DataFile_When_Loaded_Then_ReturnAllPlayers()
{
    // Arrange
    var sut = new CsvDataProvider(new PlayerParser());
    var datafile = Path.Combine(AppContext.BaseDirectory, "Data", "chicago-bulls-trimmed.csv");

    // Act
    var result = await sut.GetPlayersAsync(datafile);

    // Assert: the trimmed file yields exactly these three players, in file order.
    Assert.Collection(
        result,
        player =>
        {
            // Row 1: B.J. Armstrong
            Assert.Equal(1, player.Id);
            Assert.Equal(Position.PG, player.Position);
            Assert.Equal(10, player.Number);
            Assert.Equal("United States", player.Country);
            Assert.Equal("Armstrong, B.J.", player.Name);
            Assert.Equal("6 ft 2 in", player.Height);
            Assert.Equal("175 lb", player.Weight);
            Assert.Equal("Iowa", player.University);
            Assert.Equal(12.3m, player.PointsPerGame);
        },
        player =>
        {
            // Row 2: Bill Cartwright
            Assert.Equal(2, player.Id);
            Assert.Equal(Position.C, player.Position);
            Assert.Equal(24, player.Number);
            Assert.Equal("United States", player.Country);
            Assert.Equal("Cartwright, Bill", player.Name);
            Assert.Equal("7 ft 1 in", player.Height);
            Assert.Equal("246 lb", player.Weight);
            Assert.Equal("San Francisco", player.University);
            Assert.Equal(5.6m, player.PointsPerGame);
        },
        player =>
        {
            // Row 3: Horace Grant
            Assert.Equal(3, player.Id);
            Assert.Equal(Position.PF, player.Position);
            Assert.Equal(54, player.Number);
            Assert.Equal("United States", player.Country);
            Assert.Equal("Grant, Horace", player.Name);
            Assert.Equal("6 ft 10 in", player.Height);
            Assert.Equal("245 lb", player.Weight);
            Assert.Equal("Clemson", player.University);
            Assert.Equal(13.2m, player.PointsPerGame);
        });
}
public void ReadCsvsFromDictionary()
{
    // Arrange: point the provider at a folder so it discovers every CSV in it.
    // NOTE(review): "Dictionary" in the test name looks like a typo for
    // "Directory"; kept as-is to avoid breaking test filters/reports.
    var provider = new CsvDataProvider(new CsvDataProviderConfiguration
    {
        FolderPath = "./CsvFiles"
    });

    // Act
    var entityDataList = provider.GetEntityDataList();

    // Assert: index 0 holds the complex fixture, index 1 the simple one.
    Assert.IsNotNull(entityDataList);
    MyAssert.AssertComplex(entityDataList[0]);
    MyAssert.AssertSimple(entityDataList[1]);
}
/// <summary>
/// Parses the server-import CSV chosen in the UI into import items.
/// Legacy 7-column files are upgraded (with user confirmation) by appending
/// PortNumber/ServerType defaults and writing the result to a temp file first.
/// </summary>
/// <returns>The parsed import items.</returns>
/// <exception cref="InvalidDataException">Wraps any read/parse failure.</exception>
private List<ImportItem> ParseFile()
{
    using (logX.loggerX.InfoCall())
    {
        try
        {
            var dataProvider = new CsvDataProvider();
            List<string> lines = File.ReadAllLines(textBox_ServersImportFile.Text).ToList();
            var totalcols = lines[0].Split(',').Length;
            string filename = textBox_ServersImportFile.Text;

            // Old CSV format: add the two missing columns with default values
            // (port 1433, on-premise) after asking the user to confirm.
            if (totalcols == 7)
            {
                StringBuilder msgBldr = new StringBuilder();
                msgBldr.Append("You are trying to import an old csv. By default all servers will be considered as on-premise\n\n");
                msgBldr.Append("Import Anyway?");
                System.Windows.Forms.DialogResult dr = MsgBox.ShowWarning(ErrorMsgs.ImportServersCaption, msgBldr.ToString());
                if (dr == DialogResult.OK)
                {
                    lines[0] += ",PortNumber, ServerType";

                    // Append the default values to every data row (skip the header).
                    for (int i = 1; i < lines.Count; i++)
                    {
                        lines[i] += ",1433, 0";
                    }

                    // Write the upgraded content next to the assembly and parse that.
                    filename = string.Format("{0}\\{1}", GetAssemblyPath, "test.csv");
                    File.WriteAllLines(filename, lines);
                }
            }

            // Dispose the stream deterministically — the original leaked the
            // FileStream, keeping the file locked until finalization.
            using (var stream = new FileStream(filename, FileMode.Open))
            {
                return dataProvider.ParseStream(stream);
            }
        }
        catch (Exception ex)
        {
            logX.loggerX.Error(ex.Message);
            throw new InvalidDataException(ErrorMsgs.InvalidImportFileFormat, ex);
        }
    }
}
/// <summary>
/// Console job entry point: reads employee data from a CSV file and from
/// Active Directory, merges CSV fields into the matching AD rows by EmployeeId,
/// and pushes the result into SharePoint user profiles. Progress and errors go
/// to Logger; the whole run is wrapped so a failure still logs final totals.
/// </summary>
private static void Main(string[] args)
{
    Console.WriteLine("HR data import job.");
    Console.WriteLine("Please do not close this window.");
    int totalImported = 0;  // profiles updated
    int totalAdded = 0;     // profiles counted as created (non-Updated status)
    try
    {
        string targetWebUrl = ConfigurationManager.AppSettings["CAWebSiteUrl"];
        Logger.Log("Start Job: Import HR data to " + targetWebUrl);
        Logger.Log("Data Mapping: Read CSV data mapping relations from " + ConfigurationManager.AppSettings["CsvDataMappingFilePath"]);

        // Phase 1: read the CSV source into a DataTable using the configured
        // mapping, encoding and delimiter (first char of the setting).
        var csvMapping = DataMapperFactory.Instance.GetDataMapper(ProviderType.CSV).GetMapping();
        DataTable csvTable;
        string csvFilePath = ConfigurationManager.AppSettings["CsvFilePath"];
        string encodingString = ConfigurationManager.AppSettings["CsvFileEncoding"];
        Encoding encoding = encodingString.IsNullOrWhitespace() ? Encoding.Default : Encoding.GetEncoding(encodingString);
        using (var csvReader = new CsvDataReader(csvFilePath, encoding, ConfigurationManager.AppSettings["CsvDataDelimiter"][0]))
        {
            csvTable = new CsvDataProvider(csvReader, csvMapping).ReadAsDataTable();
        }
        var csvRows = csvTable.Rows;
        Logger.Log("Data Import: Read CSV data [" + csvRows.Count + " rows] from " + csvFilePath);

        // Phase 2: read Active Directory into a DataTable via LDAP, fetching
        // only the attributes named in the AD mapping.
        Logger.Log("Data Mapping: Read AD data mapping relations from " + ConfigurationManager.AppSettings["ADDataMappingFilePath"]);
        var adMapping = DataMapperFactory.Instance.GetDataMapper(ProviderType.AD).GetMapping();
        DataTable adTable;
        string ldap = ConfigurationManager.AppSettings["LDAPString"];
        Logger.Log("Data Import: Read AD data from " + ldap);
        using (var adReader = new ADDataReader(ldap, adMapping.ColumnMappings.Keys.ToArray()))
        {
            adTable = new ADDataProvider(adReader, adMapping).ReadAsDataTable();
        }
        // DataTable column that holds the AD account name (sAMAccountName).
        string accountNameColumn = adMapping.ColumnMappings["sAMAccountName"].DataTableColumnName;

        // Phase 3: resolve each CSV row's ManagerId against the AD table and
        // store the manager's account in the row's "Manager" column.
        foreach (DataRow csvRow in csvRows)
        {
            csvRow.BeginEdit();
            string name = FindManagerAccountFromADTable(adTable, csvRow.Field<string>("ManagerId"), accountNameColumn);
            csvRow["Manager"] = name;
            csvRow.EndEdit();
        }

        bool logUpdatedUsers;
        bool.TryParse(ConfigurationManager.AppSettings["LogUpdatedUsers"], out logUpdatedUsers);

        // Phase 4: push the merged data into SharePoint user profiles.
        using (var site = new SPSite(targetWebUrl))
        {
            Logger.Log("Data Import: Read data import gray list.");
            // Accounts listed here are excluded from the import ("gray list").
            var exceptions = SPHelper.GetImportExceptionsFromSPList(site.RootWeb, ConfigurationManager.AppSettings["HRDataImportExceptionsListName"]);
            var context = ServerContext.GetContext(site);
            var upm = new UserProfileManager(context);
            var columns = adTable.Columns;
            foreach (DataRow r in adTable.Rows)
            {
                // User account and Employee ID should not be empty.
                string account = r.Field<string>(accountNameColumn);
                string employeeId = r.Field<string>("EmployeeId");
                if (account.IsNullOrWhitespace() || employeeId.IsNullOrWhitespace())
                {
                    continue;
                }
                // If specific account was listed in the import exceptions, ignore it.
                if (exceptions.Any(e => e.Equals(account.Trim(), StringComparison.InvariantCultureIgnoreCase)))
                {
                    Logger.Log("Data Import: User [" + account + "] data will not update for the account is in the gray list.");
                    continue;
                }
                // Find the first CSV row with the same employee ID and copy its
                // non-null values over the AD row's columns (CSV wins).
                foreach (DataRow row in from DataRow row in csvRows let id = row.Field<string>("EmployeeId") where employeeId.Trim().Equals(id) select row)
                {
                    r.BeginEdit();
                    foreach (string colName in (from DataColumn col in columns select col.ColumnName).Where(colName => !row.IsNull((string) colName)))
                    {
                        r[colName] = row[colName];
                    }
                    r.EndEdit();
                    break;  // only the first matching CSV row is applied
                }
                try
                {
                    var status = SPHelper.UpdateUserProfileByAccount(upm, account, r, columns);
                    if (status == UserProfileUpdateStatus.Updated)
                    {
                        if (logUpdatedUsers)
                        {
                            Logger.Log(string.Format("Data Import: User [{0}, {1}] was updated.", account, employeeId));
                        }
                        totalImported++;
                    }
                    else
                    {
                        totalAdded++;
                    }
                }
                catch (SPException spex)
                {
                    // A single profile failure is logged and the import continues.
                    Logger.Log("Error: User [" + account + "] data was not updated. Error Message = " + spex.Message);
                }
            }
        }
    }
    catch (Exception ex)
    {
        Logger.Log("Error: " + ex.Message);
    }
    string stat = string.Format(CultureInfo.InvariantCulture, "{0} employees were created, {1} employees were updated.", totalAdded, totalImported);
    Logger.Log("End Job: Data Import Completed. " + stat + " \r\n=======================================================");
}
/// <summary>
/// Creates a parser over the given data provider. Pure passthrough to the
/// base parser — the name suggests rows are buffered in memory, but that
/// behavior lives in the base class; confirm there.
/// </summary>
/// <param name="dataProvider">Source of CSV data; forwarded to the base parser.</param>
public InMemoryParser(CsvDataProvider dataProvider)
    : base(dataProvider)
{
}