//---------------------Generate Waterfall---------------------------------
// Builds the waterfall report output as three DataTables:
//   [0] the main waterfall query result (one row per project: demographics
//       plus the generated per-category/per-year SUM columns),
//   [1] a trial-balance summary (UNION ALL of per-year summary queries,
//       ordered by ACCT_ID),
//   [2] a helper table of the processed fiscal years (one column per year,
//       up to 12 month rows) used downstream to customize column names.
// NOTE(review): itemsProgress/itemsCount are not declared here — presumably
// instance fields shared with the import pipeline; confirm.
// NOTE(review): the SQL text interpolates context values directly
// (SelectedProjectsLevel, ReportDataSetID, demographics list). If any of
// these can carry user-typed text this is injectable — verify upstream
// validation, or move to parameterized queries.
public List <DataTable> Generate()
{
    List <DataTable> reports = new List <DataTable>();

    // Expand the selected fiscal years into a {year -> month list} map; also
    // published via ProcessedFiscalYears for later consumers.
    var yearsAndMonths = ProcessFiscalYears(_waterfallContext.SelectedFiscalYears);
    ProcessedFiscalYears = yearsAndMonths;

    // One SUM sub-query per category/year pair; published via GeneratedQueries.
    var trialBalanceQueries = GenerateSUMQueriesForCategoriesAndYears(yearsAndMonths);
    GeneratedQueries = trialBalanceQueries;

    // Blank demographic names would produce empty SELECT columns — drop them.
    _waterfallContext.SelectedDemographics.RemoveAll(x => string.IsNullOrWhiteSpace(x));
    string demographicsSelect = string.Join(",", _waterfallContext.SelectedDemographics) + ",";

    // Main waterfall query: project id + demographics + generated SUM columns,
    // restricted to the selected project level and this report data set.
    var waterfallQuery = $@" Select PROJ_ID AS [Project Id], {demographicsSelect}" + string.Join(",", trialBalanceQueries.Select(x => x.Query).ToList()) + $@" FROM SawyerSight.PROJ pr JOIN SawyerSight.CUST c on pr.CUST_ID=c.CUST_ID WHERE LVL_NO = {_waterfallContext.SelectedProjectsLevel} AND pr.ReportDataSetID = {_waterfallContext.ReportDataSetID} AND c.ReportDataSetID={_waterfallContext.ReportDataSetID} ";

    // Trial-balance detail: UNION ALL of all per-year summaries, sorted by account.
    var trialBalanceQuery = $@"SELECT * FROM ( { string.Join("UNION ALL", GenerateTrialBalanceSummaryQueries(yearsAndMonths).Select(x => x.Query).ToList())} ) AS Temp Order By ACCT_ID";

    SignalRProcessor.SendImportUpdate("Generating Waterfall Report", itemsProgress++, itemsCount);
    reports.Add(_migrationService.GenerateWaterfall(waterfallQuery));
    reports.Add(_migrationService.GenerateWaterfall(trialBalanceQuery));

    //pass years data table to customize column names
    DataTable yearsDataTable = new DataTable();
    //add columns to table — one column per processed fiscal year
    for (int i = 0; i < ProcessedFiscalYears.Keys.Count(); i++)
    {
        yearsDataTable.Columns.Add(ProcessedFiscalYears.Keys.ElementAt(i));
    }
    // One row per month slot (max 12); years with fewer than 12 months leave
    // the remaining cells unset.
    for (int j = 0; j < 12; j++)
    {
        DataRow newRow = yearsDataTable.Rows.Add();
        foreach (var key in ProcessedFiscalYears.Keys)
        {
            if (ProcessedFiscalYears[key].Count > j)
            {
                newRow[key] = ProcessedFiscalYears[key][j];
            }
        }
    }
    reports.Add(yearsDataTable);

    // NOTE(review): this string literal spans a raw line break in the source —
    // looks like an extraction artifact; kept byte-identical here.
    SignalRProcessor.SendImportUpdate("Generating Waterfall Report Finished. 
You will be redirected to the Excel Preview.", itemsCount, itemsCount);
    return(reports);
}
// Runs the full ETL import pipeline for the waterfall report: organizations,
// projects, customers, project-level cross-mapping, then the selected trial
// balance accounts and their posted amounts — reporting progress over SignalR
// at each stage.
public void Export()
{
    int itemsCount = 8;
    int itemsProgress = 1;

    // Organizations: pull, filter to the user's selection, import.
    SignalRProcessor.SendImportUpdate("Processing Selected Organizations", itemsProgress++, itemsCount);
    var orgs = _etlService.GetOrganizations(_waterfallContext.ReportDataSetID);
    var selectedOrgs = ProcessSelectedOrganizations(orgs, _waterfallContext.SelectedOrganizations);
    _migrationService.ImportOrganizations(selectedOrgs, _waterfallContext.ReportDataSetID);

    // Projects: pull, filter, de-duplicate, import.
    SignalRProcessor.SendImportUpdate("Processing Selected Projects", itemsProgress++, itemsCount);
    var etlProjects = _etlService.GetETLProjects(_waterfallContext.ReportDataSetID);
    var selectedProjects = ProcessSelectedProjects(etlProjects, _waterfallContext.SelectedProjects).Distinct().ToList();
    _migrationService.ImportProjects(selectedProjects, _waterfallContext.ReportDataSetID);

    // Customers: import the entire CUST table for this data set.
    SignalRProcessor.SendImportUpdate("Processing Customers from CUST Table", itemsProgress++, itemsCount);
    var customers = _etlService.GetAllCustomers(_waterfallContext.ReportDataSetID);
    _migrationService.ImportCustomers(customers, _waterfallContext.ReportDataSetID);

    // Resolve the project hierarchy at the selected level.
    SignalRProcessor.SendImportUpdate("Processing Projects Levels", itemsProgress++, itemsCount);
    _migrationService.CrossMapProjects(_waterfallContext.ReportDataSetID, _waterfallContext.SelectedProjectsLevel);

    // Gather every selected revenue/cost account node, preserving group order.
    SignalRProcessor.SendImportUpdate("Processing Selected Trial Balance Accounts", itemsProgress++, itemsCount);
    var allTrialBalances = new List <string>();
    foreach (var nodeGroup in new IEnumerable<string>[]
    {
        _waterfallContext.RevenueAccounts.Revenue1Nodes,
        _waterfallContext.RevenueAccounts.Revenue2Nodes,
        _waterfallContext.RevenueAccounts.Revenue3Nodes,
        _waterfallContext.RevenueAccounts.Revenue4Nodes,
        _waterfallContext.CostsAccounts.Costs1Nodes,
        _waterfallContext.CostsAccounts.Costs2Nodes,
        _waterfallContext.CostsAccounts.Costs3Nodes,
        _waterfallContext.CostsAccounts.Costs4Nodes,
        _waterfallContext.CostsAccounts.Costs5Nodes,
        _waterfallContext.CostsAccounts.Costs6Nodes,
        _waterfallContext.CostsAccounts.Costs7Nodes,
        _waterfallContext.CostsAccounts.Costs8Nodes,
    })
    {
        allTrialBalances.AddRange(nodeGroup);
    }

    // Accounts and amounts restricted to the gathered trial balance nodes.
    SignalRProcessor.SendImportUpdate("Processing Accounts from ACCT Table", itemsProgress++, itemsCount);
    var accounts = _etlService.GetAllAcct(_waterfallContext.ReportDataSetID, allTrialBalances);
    _migrationService.ImportAccounts(accounts, _waterfallContext.ReportDataSetID);

    var trialAccounts = _etlService.GetAllTrialBalanceTransactionAmounts(selectedProjects.Select(x => x.PROJ_ID).ToList(), allTrialBalances, _waterfallContext.ReportDataSetID);
    SignalRProcessor.SendImportUpdate("Processing Amounts", itemsProgress++, itemsCount);
    _migrationService.ImportGlPostSum(trialAccounts, _waterfallContext.ReportDataSetID);
    //------------------------------------ Finished importing data ---------------------------------------------------
}
// Extracts an uploaded client archive (when provided) and imports every
// successfully exported table listed in tables.txt into the database for the
// given report data set, reporting progress over SignalR.
//
//   ClientPath      - working folder for the archive; recreated when
//                     zipArchive is supplied, deleted after a successful run.
//   reportDataSetID - target data set id; stored in ReportDataSetID.
//   zipArchive      - raw zip bytes; when null, an already-extracted folder
//                     at ClientPath/Extracted is assumed to exist.
// Returns true when no table failed to read or import.
public bool LoadClientData(string ClientPath, int reportDataSetID, byte[] zipArchive = null)
{
    errorOccured = false;
    ReportDataSetID = reportDataSetID;
    try
    {
        if (zipArchive != null)
        {
            // Start from a clean folder so stale files from a previous run
            // cannot leak into this import.
            if (Directory.Exists(ClientPath))
            {
                Directory.Delete(ClientPath, true);
            }
            Directory.CreateDirectory(ClientPath);
            File.WriteAllBytes(Path.Combine(ClientPath, "Archive.zip"), zipArchive);
            ZipFile.ExtractToDirectory(Path.Combine(ClientPath, "Archive.zip"), Path.Combine(ClientPath, "Extracted"));
        }
        SignalRProcessor.SendImportUpdate("Extraction Completed. Loading data...", itemsProgress++, itemsCount);
        workingDirectory = Path.Combine(ClientPath, "Extracted");
        List <string> tablesForExtraction = File.ReadAllLines(Path.Combine(workingDirectory, "tables.txt")).ToList();
        ExecutionManifest executionLog = JsonConvert.DeserializeObject <ExecutionManifest>(File.ReadAllText(Path.Combine(workingDirectory, "executionLog.log")));
        if (executionLog.Status == "SUCCESS")
        {
            foreach (var table in tablesForExtraction)
            {
                var tableLog = executionLog.TablesInfo.FirstOrDefault(x => x.TableName == table);
                // BUG FIX: FirstOrDefault returns null when tables.txt lists a
                // table the manifest has no entry for; the previous code
                // dereferenced it unconditionally and threw a
                // NullReferenceException. Skip such tables instead.
                if (tableLog != null && tableLog.Status == "SUCCESS" && tableLog.TableExported == 1)
                {
                    SignalRProcessor.SendImportUpdate($"Reading table {table}", itemsProgress++, itemsCount);
                    var dt = CSVToDataTable(table, executionLog.ExportEngine);
                    if (dt != null)
                    {
                        SignalRProcessor.SendImportUpdate($"Importing table {table}", itemsProgress++, itemsCount);
                        ImportDataTable(table, dt, ReportDataSetID);
                    }
                }
            }
            Directory.Delete(ClientPath, true);
        }
    }
    catch (Exception ex)
    {
        // BUG FIX: the unconditional Directory.Delete here could itself throw
        // (folder never created, or files locked), which masked the original
        // error, skipped the progress message, left errorOccured false, and
        // let the new exception escape the method. Clean up best-effort only.
        try
        {
            if (Directory.Exists(ClientPath))
            {
                Directory.Delete(ClientPath, true);
            }
        }
        catch (Exception)
        {
            // Deliberate: cleanup is best-effort; the original failure is
            // reported below and reflected in the return value.
        }
        SignalRProcessor.SendImportUpdate($"Archive Failed Processing. Error:" + ex.Message, itemsProgress++, itemsCount);
        errorOccured = true;
    }
    return(!errorOccured);
}
// Reads the exported CSV for the given table into a DataTable. On failure the
// table is recorded in failedTables, the shared error state is set, and null
// is returned so the caller can skip the import step for that table.
private DataTable CSVToDataTable(string table, string exportEngine)
{
    DataTable result = null;
    try
    {
        result = CsvFileToDatatable(table, exportEngine);
    }
    catch (Exception e)
    {
        errorOccured = true;
        failedTables.Add(table);
        errors.Add(e.StackTrace);
        SignalRProcessor.SendImportUpdate($"Reading table {table} from CSV file failed. Error: " + e.Message + Environment.NewLine + "Stack Trace: " + e.StackTrace, itemsProgress++, itemsCount);
    }
    return result;
}
// Pushes one parsed table into the database for the given data set, reporting
// success or failure over SignalR and recording any failure in the shared
// error state (failedTables / errors / errorOccured).
private void ImportDataTable(string tableName, DataTable data, int reportDataSetID)
{
    try
    {
        _etlService.InsertTable(tableName, data, reportDataSetID);
        SignalRProcessor.SendImportUpdate($"Table {tableName} processed.", itemsProgress++, itemsCount);
    }
    catch (Exception e)
    {
        SignalRProcessor.SendImportUpdate($"Table {tableName} processing failed. Importing process might not complete successfully. Error:" + e.Message + Environment.NewLine + "StackTrace:" + e.StackTrace, itemsProgress++, itemsCount);
        errorOccured = true;
        failedTables.Add(tableName);
        errors.Add(e.StackTrace);
    }
}