// Fetches the current page of exam records (honoring sort/keyword state) and
// binds it to the exam list grid.
private void QueryPaginationData()
{
    paginationUtils.TotalRecord = examManager.GetExamCount(RememberUserParameter.username, QueryExamState.KeywordState);
    UserDS.SearchExamDataTable examTable = examManager.SearchExam(
        QueryExamState.SortColumn,
        QueryExamState.SortDirection,
        paginationUtils.CurrentPage,
        paginationUtils.PageSize,
        QueryExamState.KeywordState,
        RememberUserParameter.username);
    this.AllExam.GetDgvExamList().DataSource = DataTableUtils.ConvertDataTable(examTable);
    EditDataGridView();
}
// Persists the current data table to FileName and notifies listeners.
// FIX: the old code stored SaveDataTable's bool result in an unused local and
// ignored it; the result is now checked and surfaced in the log output.
public void SaveData()
{
    bool saved = DataTableUtils.SaveDataTable(_DT, FileName);
    print(saved ? "Save as:" + FileName : "Save failed:" + FileName);
    // Callbacks fire regardless of the outcome, preserving previous behavior
    // for listeners that track the chosen path and row count.
    _SaveFilePath.Invoke(FileName);
    _TestCount.Invoke(_DT.Rows.Count);
}
/// <summary>
/// Returns the jQuery DataTable JSON payload of operation-log rows for the
/// current page, translating the operate-type code into its display name.
/// </summary>
public JsonResult GetOperateLogListJsonData()
{
    // Extract the jQuery DataTable request parameters.
    DataTableUtils.DataTableModel dtm = DataTableUtils.GetJquerydataTableParams();

    // Build the paging input.
    PagingModel paging = new PagingModel();
    paging.PageSize = dtm.PageSize;
    paging.PageCurrent = dtm.PageIndex;
    paging.FieldShow = "OperateType,Operator,OperateDate,OperateIP,OperateContent,TargetPK,TargetType";
    paging.Where = "1=1";
    if (!string.IsNullOrWhiteSpace(dtm.FieldCondition))
    {
        paging.Where = dtm.FieldCondition;
    }
    if (!string.IsNullOrEmpty(dtm.KeyWord))
    {
        // SECURITY(review): dtm.KeyWord is concatenated straight into the WHERE
        // clause — SQL injection risk; switch to parameterized filtering if the
        // paging service supports it.
        paging.Where = string.Format("{0} and (Operator like '%{1}%' or OperateContent like '%{1}%' or TargetType like '%{1}%')", paging.Where, dtm.KeyWord);
    }

    // Look up the operate-type parameter so codes can be shown as names.
    IList<ParamModel> paramAllList = Global.Business.ServiceProvider.ParamService.GetAll();
    ParamModel operateTypeParam = paramAllList.SingleOrDefault(S => S.ParamCode == UtilConstants.SysParamType.OperateType);
    ParamItemModel item = null;

    // Query the database rows for the current page.
    IList<Hashtable> operateList = ServiceProvider.OperateLogService.GetAllOperateLogListForPaging(paging);

    // Assemble the DataTable JSON payload: one list of display cells per row.
    // (Removed unused locals `sb` and `orderId` and the dead commented-out code
    // from the old version; each row's cell list is now built via a local
    // instead of repeated aaData[Count - 1] indexing.)
    dtm.iTotalRecords = paging.RecordCount;
    dtm.iTotalDisplayRecords = dtm.iTotalRecords;
    dtm.aaData = new List<List<string>>();
    foreach (Hashtable row in operateList)
    {
        List<string> cells = new List<string>();
        dtm.aaData.Add(cells);
        // Translate the numeric operate type into its display name, if known.
        item = operateTypeParam.itemsList.SingleOrDefault(p => p.ParamItemValue.Equals(row["OperateType"].ToString()));
        cells.Add(string.Format("{0}", item == null ? "" : item.ParamItemName));
        cells.Add(string.Format("{0}", row["Operator"]));
        cells.Add(string.Format("{0}", row["OperateIP"]));
        cells.Add(string.Format("{0}", row["OperateContent"]));
        cells.Add(string.Format("{0}", row["TargetPK"]));
        cells.Add(string.Format("{0}", (UtilConstants.TargetType)Convert.ToInt32(row["TargetType"])));
        cells.Add(string.Format("{0:yyyy-MM-dd HH:mm:ss}", row["OperateDate"]));
    }
    JsonResult jr = Json(new { sEcho = dtm.sEcho, iTotalRecords = dtm.iTotalRecords, iTotalDisplayRecords = dtm.iTotalDisplayRecords, aaData = dtm.aaData }, JsonRequestBehavior.AllowGet);
    return jr;
}
/// <summary>
/// Executes a DDR LISTER query against VistA and converts the raw delimited
/// results into <see cref="QueryResults"/>. Handles three special cases:
/// lab-chem file 63.04, key/value (vertical) sub-queries, and WP/computed fields.
/// </summary>
/// <param name="query">The fully-populated VistA query to run.</param>
/// <returns>Query results built from the DDR output.</returns>
public QueryResults query(VistaQuery query)
{
    // If we're querying unpacked, we should add the "WID" field to our fields string
    // so DDR LISTER will return the identifier values.
    String[] ddrResults = _dao.ddrLister(query.SiteCode, query.VistaFile, query.IENS, query.Fields, query.Flags, query.MaxRecords, query.From, query.Part, query.XREF, query.Screen, query.Identifier);

    // Special 63.04 (lab chem) handling: results are parsed by a dedicated utility.
    if (String.Equals(query.VistaFile, "63.04"))
    {
        _labChemIens.Add(query.IENS.Replace(",", "")); // add the file 63 IEN to this collection for ticket #76
        //return new LabChemUtils(_labChemDataDictionary).parseLabChemDdrResults(query, ddrResults);
        return(new LabChemUtils().parseLabChemDdrResults(query, ddrResults));
    } // end special 63.04 handling

    // First check whether we should fetch key/val (aka vertical) results.
    DataTable verticalResults = null;
    if (!String.IsNullOrEmpty(query.WP_Or_Computed_Fields) && !String.IsNullOrEmpty(query.Gets_Alignment)) // KEY/VAL SUB QUERIES
    {
        verticalResults = getVerticalResultsForQueries(query, ddrResults, query.IENS);
    }
    // If not looking for vertical, do we need to fetch WP or other large fields and add them to DDR?
    else if (!String.IsNullOrEmpty(query.WP_Or_Computed_Fields)) // WP fields
    {
        ddrResults = addWpOrComputed(query, ddrResults);
    }

    QueryResults qr = null;
    // If we are fetching WP fields AND this configuration isn't building a key/value
    // table AND there are subfiles, then we need the special adjustment method in
    // DataTableUtils — ticket #16.
    if (!String.IsNullOrEmpty(query.WP_Or_Computed_Fields) && String.IsNullOrEmpty(query.Gets_Alignment) && !String.IsNullOrEmpty(query.IdentifiedFiles))
    {
        DataTableUtils.adjustDdrResultsWithWpAndIdentifiedFiles(query, ddrResults);
        qr = DataTableUtils.toQueryResultsFromDdr(query, ddrResults); // ddrResults is "fixed" by adjust function
    }
    else // for most cases, just building this up without special logic above
    {
        qr = DataTableUtils.toQueryResultsFromDdr(query, ddrResults);
    }

    // NOTE(review): collecting sub-query exceptions here, out of process from the WP
    // sub-queries above, is admittedly hackish but acceptable for now.
    if (!String.IsNullOrEmpty(query.WP_Or_Computed_Fields) && _exceptionBag != null)
    {
        foreach (Exception e in _exceptionBag)
        {
            _report.Exceptions.Add(e);
        }
    }

    // Did we have any key/val queries? If so, add that table to our results.
    if (verticalResults != null)
    {
        qr.DdrResults.Add(verticalResults);
    }

    return(qr); // DataTableUtils.toQueryResultsFromDdr(query, ddrResults);
}
/// <summary>
/// Builds the WHERE condition for the instrument search from the current
/// jQuery DataTable request parameters.
/// </summary>
/// <returns>The condition string; "1=1" when no condition was supplied.</returns>
private string GetInstrumentSearchCondition()
{
    DataTableUtils.DataTableModel model = DataTableUtils.GetJquerydataTableParams();
    string condition = string.IsNullOrWhiteSpace(model.FieldCondition)
        ? "1=1"
        : model.FieldCondition;
    return condition;
}
// Writes a strongly-typed record list as a CSV file under the configured
// strategy cache directory, with the path derived from tag/date/type/parameters.
public static void recordToCsv<T>(IList<T> data, string tag, string type, string parameters = "", string performance = "")
{
    var basePath = ConfigurationManager.AppSettings["RootPath"]
        + ConfigurationManager.AppSettings["CacheData.ResultPath"]
        + ConfigurationManager.AppSettings["CacheData.StrategyPath"];
    var dayStamp = Kit.ToInt_yyyyMMdd(DateTime.Now).ToString();
    var targetFile = ResultPathUtil.GetLocalPath(basePath, tag, dayStamp, type, parameters, performance);
    var table = DataTableUtils.ToDataTable(data);
    CsvFileUtils.WriteToCsvFile(targetFile, table);
}
// Re-queries the exam list with the given sort column/direction and rebinds
// the grid (extracted to avoid repeated code at each call site).
private void DynamicSort(ExamService.ExamServiceClient examManager, string SortColumn, string SortDirection)
{
    QueryExamState.SortColumn = SortColumn;
    QueryExamState.SortDirection = SortDirection;
    MyExam owner = (MyExam)this.Parent;
    UserDS.SearchExamDataTable sortedTable = examManager.SearchExam(
        QueryExamState.SortColumn,
        QueryExamState.SortDirection,
        owner.GetCurrentPage(),
        owner.GetPageSize(),
        QueryExamState.KeywordState,
        RememberUserParameter.username);
    this.dgvExamList.DataSource = DataTableUtils.ConvertDataTable(sortedTable);
}
/// <summary>
/// Returns the jQuery DataTable JSON payload of users belonging to the given
/// organisation (and its sub-departments, matched by department-code prefix).
/// </summary>
/// <param name="orgId">Organisation primary key used to resolve the code prefix.</param>
public ActionResult GetServerJsonData(int orgId)
{
    // Extract the jQuery DataTable request parameters.
    DataTableUtils.DataTableModel dtm = DataTableUtils.GetJquerydataTableParams();
    string code = ServiceProvider.OrgService.GetCodeById(orgId);

    // Build the paging input.
    PagingModel paging = new PagingModel();
    paging.PageSize = dtm.PageSize;
    paging.PageCurrent = dtm.PageIndex;
    paging.Where = "1=1";
    if (!string.IsNullOrWhiteSpace(dtm.FieldCondition))
    {
        paging.Where = dtm.FieldCondition;
    }
    // Restrict to the organisation subtree via the department-code prefix.
    paging.Where = string.Format(" {0} and BelongDepart like '{1}%'", paging.Where, code);
    if (!string.IsNullOrEmpty(dtm.KeyWord))
    {
        // SECURITY(review): dtm.KeyWord is concatenated into the WHERE clause —
        // SQL injection risk; prefer parameterized filtering if the paging API allows it.
        paging.Where = string.Format("{0} and (UserName like '{1}%' or LoginName like '{1}%')", paging.Where, dtm.KeyWord);
    }

    // Query the user rows for the current page.
    IList <Hashtable> userList = ServiceProvider.UserService.GetListForPaging(paging);

    // Assemble the DataTable JSON payload: one row of display cells per user.
    dtm.iTotalRecords = paging.RecordCount;
    dtm.iTotalDisplayRecords = dtm.iTotalRecords;
    dtm.aaData = new List <List <string> >();
    foreach (var row in userList)
    {
        // Encrypted id used for the person-info link so the raw key is not exposed.
        string userId = UtilsHelper.Encrypt(row["UserId"].ToString());
        dtm.aaData.Add(new List <string>());
        dtm.aaData[dtm.aaData.Count - 1].Add(string.Format("<a href='/Employee/PersonInfo?userId={0}' target=\"_blank\">{1}</a>", userId, row["UserName"]));
        dtm.aaData[dtm.aaData.Count - 1].Add(row["Sex"].ToString());
        dtm.aaData[dtm.aaData.Count - 1].Add(row["LoginName"].ToString());
        dtm.aaData[dtm.aaData.Count - 1].Add(row["OrgName"].ToString());
        dtm.aaData[dtm.aaData.Count - 1].Add(row["Duty"].ToString());
        dtm.aaData[dtm.aaData.Count - 1].Add(row["IsEnabled"].ToString());
        dtm.aaData[dtm.aaData.Count - 1].Add(row["EmployeeState"].ToString());
        // Action-links column (role setup / reset password / disable / enable / delete).
        // NOTE(review): user names are embedded into HTML without encoding — potential
        // XSS if names can contain markup; confirm upstream sanitization.
        dtm.aaData[dtm.aaData.Count - 1].Add(string.Format("<a href='#' onclick=\"fnNewWindow({0});return false;\">角色设置</a> <a href='#' onclick=\"fnConfirmWithoutF5('确定重置账户【{1}】密码?','/SysManage/User/ResetPassword?userId={0}');return false;\">重置密码</a> <a href='#' onclick=\"fnConfirmWithF5('确定要禁用账户:{1}','/SysManage/User/DisableAccout?userId={0}');return false;\">禁用</a> <a href='#' onclick=\"fnConfirmWithF5('确定要启用账户:{1}','/SysManage/User/EnableAccout?userId={0}');return false;\">启用</a> <a href='#' onclick=\"fnConfirmWithF5('确定要删除用户:{1}','/SysManage/User/DeleteUser?userId={0}');return false;\">删除</a>", row["UserId"], row["UserName"]));
    }
    JsonResult jr = Json(new { sEcho = dtm.sEcho, iTotalRecords = dtm.iTotalRecords, iTotalDisplayRecords = dtm.iTotalDisplayRecords, aaData = dtm.aaData }, JsonRequestBehavior.AllowGet);
    return(jr);
}
/// <summary>
/// Looks up warehouse process master rows matching the given filters and wraps
/// them in a DataSet (table "CCodeRefWarehouse") for binding.
/// </summary>
public DataSet GetWarehouseDataSet(string facCd, string strWhere, string whCd, string whDesc)
{
    ITWhPrcsMsDao dao = ComponentLocator.Instance().Resolve<ITWhPrcsMsDao>();
    IList<TWhPrcsMsNoAR> rows = dao.getAllWhPrcsByCdNm(facCd, strWhere, whCd, whDesc);
    DataTable table = DataTableUtils.ToDataTable(rows);
    table.TableName = "CCodeRefWarehouse";
    DataSet result = new DataSet();
    result.Tables.Add(table);
    return result;
}
/// <summary>
/// Looks up classification detail rows for the current UI language and wraps
/// them in a DataSet (table "CClsDetailNoAR") for binding.
/// </summary>
public DataSet GetClsDetailDataSet(string clsCd)
{
    ICClsDetailNoARDao dao = ComponentLocator.Instance().Resolve<ICClsDetailNoARDao>();
    IList<CClsDetailNoAR> rows = dao.GetClsDetailList(LangUtils.GetCurrentLanguage(), clsCd);
    DataTable table = DataTableUtils.ToDataTable(rows);
    table.TableName = "CClsDetailNoAR";
    DataSet result = new DataSet();
    result.Tables.Add(table);
    return result;
}
/// <summary>
/// Looks up person master rows matching the given filters and wraps them in a
/// DataSet (table "CTPersonMsNoAR") for binding.
/// </summary>
public DataSet GetPersonDataSet(string companyCd, string sectionCd, string personCd, string personNm)
{
    ICTPersonMsNoARDao dao = ComponentLocator.Instance().Resolve<ICTPersonMsNoARDao>();
    IList<CTPersonMsNoAR> rows = dao.GetPersonMsList(companyCd, sectionCd, personCd, personNm);
    DataTable table = DataTableUtils.ToDataTable(rows);
    table.TableName = "CTPersonMsNoAR";
    DataSet result = new DataSet();
    result.Tables.Add(table);
    return result;
}
/// <summary>
/// Looks up section master rows matching the given filters and wraps them in a
/// DataSet (table "CCodeRefSection") for binding.
/// </summary>
public DataSet GetSectionDataSet(string companyCd, string secCd, string secNm)
{
    ITSectionMsDao dao = ComponentLocator.Instance().Resolve<ITSectionMsDao>();
    IList<TSectionMsNoAR> rows = dao.getAllSectionByCdNm(companyCd, secCd, secNm);
    DataTable table = DataTableUtils.ToDataTable(rows);
    table.TableName = "CCodeRefSection";
    DataSet result = new DataSet();
    result.Tables.Add(table);
    return result;
}
/// <summary>
/// Looks up material-trade code-reference rows for the current UI language and
/// wraps them in a DataSet (table "CCodeRefTradeForMaterial") for binding.
/// </summary>
public DataSet GetTradeForMaterialDataSet(string companyCd, string dlCd, string dlDesc)
{
    ICCodeRefTradeForMaterialNoARDao dao = ComponentLocator.Instance().Resolve<ICCodeRefTradeForMaterialNoARDao>();
    IList<CCodeRefTradeForMaterialNoAR> rows = dao.GetCodeRefTradeForMaterial(LangUtils.GetCurrentLanguage(), companyCd, dlCd, dlDesc);
    DataTable table = DataTableUtils.ToDataTable(rows);
    table.TableName = "CCodeRefTradeForMaterial";
    DataSet result = new DataSet();
    result.Tables.Add(table);
    return result;
}
/// <summary>
/// Strip all delimiters used by our delimited file format (RS and US by
/// default) from every element of the result array, in place.
/// </summary>
/// <param name="result">Raw result strings; may be null or empty.</param>
/// <returns>The same array with delimiter characters removed from each element.</returns>
public static String[] stripInvalidChars(String[] result)
{
    // Wish we didn't have to do this here, but the string does not appear to be
    // encoded as expected until after deserialization.
    if (result == null || result.Length == 0)
    {
        return result;
    }
    IList<Char> delimiters = DataTableUtils.getDelimiters();
    for (int idx = 0; idx < result.Length; idx++)
    {
        result[idx] = StringUtils.stripChars(result[idx], delimiters);
    }
    return result;
}
/// <summary>
/// Runs the Harmony Search tests for the given scenario plus the A* baseline,
/// then exports the averaged results.
/// </summary>
/// <param name="scenario">The scenario.</param>
/// <param name="source">The source location.</param>
/// <param name="destination">The destination location.</param>
/// <param name="dateTime">Optional graph timestamp; defaults to now.</param>
public void RunTestsWithScenario(HarmonySearchTestScenario scenario, Location source, Location destination, DateTime? dateTime = null)
{
    var graph = _graphBuilder.GetGraph(dateTime ?? DateTime.Now);
    var resultsDirectory = $"Tests_{DateTime.Now:ddMMyyyy_HHmm}_{source.Name.Trim()}_{destination.Name.Trim()}";
    var commonInfo = DataTableUtils.GetCommonInfoDataTable(source, destination);

    var harmonyResults = RunTests(scenario, source, destination, graph, resultsDirectory, commonInfo);
    var aStarResults = RunAStarTests(graph, source, destination, resultsDirectory, commonInfo);

    ExportAverageTestResults(harmonyResults, aStarResults, commonInfo, resultsDirectory);
}
/// <summary>
/// Exports the temperature-vs-time data to a user-chosen CSV file.
/// FIX: the dialog is now wrapped in a using block so its handle is released
/// even if the export throws (the old code only disposed on the normal path).
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Event arguments.</param>
private void Button4_Click(object sender, EventArgs e)
{
    using (SaveFileDialog fileDialog = new SaveFileDialog())
    {
        fileDialog.Filter = "文档|*.csv";
        fileDialog.InitialDirectory = Application.StartupPath;
        if (fileDialog.ShowDialog() == DialogResult.OK)
        {
            DataTableUtils.DataTableToCsvT(formMain.ElectDt, fileDialog.FileName);
            MessageBox.Show("保存成功!");
        }
    }
}
// Computes the rolling correlation between indices 399330.SZ and 000905.SZ over
// each `period`-trade-day window (240 minute bars per day) and writes the series
// to d:\corr0.csv.
// FIX: removed the unused overwrite/new-file flag computed before the write, and
// the empty catch now surfaces write failures instead of swallowing them silently.
public void CorrOf100and500()
{
    List<StockMinute> all100 = new List<StockMinute>();
    List<StockMinute> all500 = new List<StockMinute>();
    List<CorrStatic> corrList = new List<CorrStatic>();

    // Collect minute bars for both indices across every trade day.
    for (int i = 0; i < tradeDays.Count(); i++)
    {
        var now = tradeDays[i];
        var index100 = Platforms.container.Resolve<StockMinuteRepository>().fetchFromLocalCsvOrWindAndSave("399330.SZ", now);
        var index500 = Platforms.container.Resolve<StockMinuteRepository>().fetchFromLocalCsvOrWindAndSave("000905.SZ", now);
        all100.AddRange(index100);
        all500.AddRange(index500);
    }

    // Slide a `period`-day window over the series and compute each window's correlation.
    for (int i = 0; i < tradeDays.Count() - period; i++)
    {
        DateTime start = tradeDays[i];
        DateTime end = tradeDays[i + period];
        int startIndex = i * 240;
        int endIndex = (i + period) * 240 - 1;
        CorrStatic corr = new CorrStatic() { start = start, end = end, underlying1 = "399330.SZ", underlying2 = "000905.SZ", corr = 0 };
        List<double> underlying1 = new List<double>();
        List<double> underlying2 = new List<double>();
        for (int j = startIndex; j < endIndex; j++)
        {
            underlying1.Add(all100[j].close);
            underlying2.Add(all500[j].close);
        }
        corr.corr = getCorr(underlying1, underlying2);
        corrList.Add(corr);
    }

    var dt = DataTableUtils.ToDataTable(corrList);
    string path = "d:\\corr0.csv";
    try
    {
        CsvFileUtils.WriteToCsvFile(path, dt);
    }
    catch (Exception e)
    {
        // Best-effort export (as before), but at least surface the failure.
        Console.WriteLine("Failed to write {0}: {1}", path, e.Message);
    }
}
/// <summary>
/// Looks up factory master rows matching the given filters and wraps them in a
/// DataSet (table "CCodeRefFactory"); an empty result yields one blank row so
/// downstream binding always has a row to work with.
/// </summary>
public DataSet GetFactoryDataSet(string companyCd, string facCd, string facNm)
{
    ITFactoryMsDao dao = ComponentLocator.Instance().Resolve<ITFactoryMsDao>();
    IList<TFactoryMs> rows = dao.getAllFactoryByCdNm(companyCd, facCd, facNm);
    if (rows.Count == 0)
    {
        rows.Add(new TFactoryMs());
    }
    DataTable table = DataTableUtils.ToDataTable(rows);
    table.TableName = "CCodeRefFactory";
    DataSet result = new DataSet();
    result.Tables.Add(table);
    return result;
}
/// <summary>
/// Reads a plog XML file into the supplied message and meta tables, validating
/// the log format and plog version (upgrading older versions in place).
/// </summary>
/// <param name="filename">Path of the plog file to read.</param>
/// <param name="messageTable">Table receiving the log messages; its DefaultView filter is preserved.</param>
/// <param name="metaTable">Table receiving the log metadata (including the plog version).</param>
/// <param name="lastError">Set to a description of the failure, or "" on success.</param>
/// <returns>True when the file was read and validated successfully.</returns>
static public bool ReadPlog(String filename, DataTable messageTable, DataTable metaTable, out String lastError)
{
    lastError = "";
    if (!File.Exists(filename))
    {
        lastError = String.Format("Cannot find file: {0}", filename);
        return(false);
    }

    // Read the file: both tables are temporarily added to a DataSet so that
    // DataSet.ReadXml can populate them by table name.
    String fullTableFilter = messageTable.DefaultView.RowFilter;
    XmlReadMode readMode = XmlReadMode.Auto;
    DataSet dataset = new DataSet();
    dataset.Tables.Add(metaTable);
    dataset.Tables.Add(messageTable);
    readMode = dataset.ReadXml(filename);
    dataset.Tables.Clear();
    // Adding a table to a DataSet resets its DataView, so restore the filter.
    messageTable.DefaultView.RowFilter = fullTableFilter;
    messageTable.DefaultView.AllowNew = false;
    // IgnoreSchema means the XML carried no schema for our tables — not a plog file.
    if (readMode == XmlReadMode.IgnoreSchema)
    {
        lastError = "Incorrect log format";
        return(false);
    }

    // Version check: reject missing/unparseable (0) and newer-than-known versions;
    // upgrade older versions in place.
    int version = 0;
    object plogVersion = metaTable.Rows[0][DataColumnNames.PlogVersion];
    int.TryParse(plogVersion.ToString(), out version);
    if (version <= 0 || version > DataTableUtils.PlogVersion)
    {
        lastError = "Incorrect version of plog-file";
        return(false);
    }
    else if (version > 0 && version < DataTableUtils.PlogVersion) // version > 0 is always true on this branch
    {
        DataTableUtils.UpgradePlog(messageTable, metaTable, filename);
    }
    return(true);
}
/// <summary>
/// Loads the PM master detail rows for the current company/language, renders a
/// CODE39 barcode PNG per row (payload: drawing number IDrwNo), and returns the
/// report data source.
/// FIX: replaced the redundant per-row Convert.ToInt32 calls on literal 200/40
/// with constants and hoisted the loop-invariant barcode settings out of the loop.
/// </summary>
/// <param name="datasetName">Name of the report dataset to produce.</param>
/// <param name="condition">Unused; kept for interface compatibility.</param>
public ReportDataSource ReportData_Load(string datasetName, SearchCondition condition)
{
    ICTPmMsNoARDao dao = ComponentLocator.Instance().Resolve<ICTPmMsNoARDao>();
    LoginUserInfoVo uservo = (LoginUserInfoVo)SessionUtils.GetSession(SessionUtils.COMMON_LOGIN_USER_INFO);

    // Query PM detail for the current company and language, across all factories.
    SearchCondition condition1 = new SearchCondition();
    condition1.AddCondition("companyCd", uservo.CompanyCondition.ICompanyCd);
    condition1.AddCondition("langCd", LangUtils.GetCurrentLanguage());
    condition1.SetAddtionalCondition("ALLFACTORY", false);
    IList<CTPmMsNoAR> list = dao.GetPmMsDetail(condition1);

    // Build the data table and add a byte[] column to hold the barcode image.
    DataSet ds = new DataSet();
    DataTable dt = DataTableUtils.ToDataTable(list);
    DataColumn col = new DataColumn();
    col.DataType = typeof(byte[]);
    col.ColumnName = "BarCode";
    dt.Columns.Add(col);
    dt.TableName = "TPmMs";
    ds.Tables.Add(dt);

    // Barcode layout settings — identical for every row.
    const int W = 200;
    const int H = 40;
    BarcodeLib.AlignmentPositions Align = BarcodeLib.AlignmentPositions.CENTER;
    BarcodeLib.TYPE type = BarcodeLib.TYPE.CODE39;

    foreach (DataRow row in dt.Rows)
    {
        BarcodeLib.Barcode b = new BarcodeLib.Barcode();
        b.IncludeLabel = true;
        b.Alignment = Align;
        b.LabelPosition = BarcodeLib.LabelPositions.BOTTOMCENTER;
        b.LabelFont = new Font("宋体", 6);
        // Encode the drawing number and store the PNG bytes in the row.
        b.Encode(type, row["IDrwNo"].ToString(), Color.Black, Color.White, W, H);
        row["BarCode"] = b.GetImageData(BarcodeLib.SaveTypes.PNG);
    }

    ReportDataSource result = new ReportDataSource(datasetName, ds.Tables["TPmMs"]);
    return result;
}
/// <summary>
/// Fetches all API access tokens via the dbo.SysApi_GetAccessTokens stored procedure.
/// </summary>
/// <returns>One token record per result row.</returns>
public List<SysApiAccessToken> GetApiAccessToken()
{
    SqlCommand command = new SqlCommand("dbo.SysApi_GetAccessTokens");
    var resultTable = _sqlDatabase.ExecuteSelect(command);
    var tokens = new List<SysApiAccessToken>();
    foreach (DataRow record in resultTable.Rows)
    {
        tokens.Add(new SysApiAccessToken
        {
            SysApiId = DataTableUtils.GetValue<int>(record, "SysApiId"),
            ApiCode = DataTableUtils.GetValue<string>(record, "Code"),
            Token = DataTableUtils.GetValue<string>(record, "Token"),
        });
    }
    return tokens;
}
/// <summary>
/// Encodes this block into the supplied StringBuilder using the classic
/// SEPARATOR-delimited wire format: version, id, name, preview length, data
/// length (header), followed by the preview and data byte payloads.
/// </summary>
/// <param name="sb">Destination buffer; the encoded block is appended to it.</param>
/// <param name="settings">Encoding settings (not consulted directly in this method).</param>
/// <param name="isTransferEncode">True to transfer-encode the header before appending.</param>
/// <param name="encodeLevel">Nesting level forwarded to the byte-payload encoder.</param>
/// <returns>The same StringBuilder, for chaining.</returns>
public StringBuilder encode(StringBuilder sb, ClassicEncodingSettings settings, bool isTransferEncode, int encodeLevel)
{
    // Pre-grow the buffer so the appends below don't trigger repeated
    // reallocation; BUFFER_MULTIPLIER pads the size estimate.
    if (sb.Length + (estimateDataSize() * BUFFER_MULTIPLIER) > sb.Capacity)
    {
        sb.EnsureCapacity((int)(sb.Capacity + (estimateDataSize() * BUFFER_MULTIPLIER)));
    }

    // Build the header fields, SEPARATOR-delimited. A null id/name is written as
    // DATA_TABLE_NULL; a missing preview/data payload is written with length "-1".
    StringBuilder tempSB = new StringBuilder();
    tempSB.Append(TRANSCODER_VERSION.ToString());
    tempSB.Append(SEPARATOR);
    tempSB.Append(getId() != null ? getId().ToString() : DataTableUtils.DATA_TABLE_NULL);
    tempSB.Append(SEPARATOR);
    tempSB.Append(getName() != null ? getName() : DataTableUtils.DATA_TABLE_NULL);
    tempSB.Append(SEPARATOR);
    tempSB.Append(getPreview() != null ? getPreview().Length.ToString() : "-1");
    tempSB.Append(SEPARATOR);
    tempSB.Append(getData() != null ? getData().Length.ToString() : "-1");
    tempSB.Append(SEPARATOR);

    // The header is transfer-encoded as a whole when requested; the byte
    // payloads handle their own encoding in appendBytes below.
    if (isTransferEncode)
    {
        sb.Append(DataTableUtils.transferEncode(tempSB.ToString()));
    }
    else
    {
        sb.Append(tempSB);
    }

    // Append the preview and data payloads after the header.
    appendBytes(sb, getPreview(), isTransferEncode, encodeLevel);
    appendBytes(sb, getData(), isTransferEncode, encodeLevel);
    return(sb);
}
// Stage 2 of the data import pipeline: reads the meta blob, imports the subject
// metadata, then imports filters and locations for the subject.
public async Task ProcessStage2(Guid importId)
{
    var statisticsDbContext = DbUtils.CreateStatisticsDbContext();
    var import = await _dataImportService.GetImport(importId);
    var subject = await statisticsDbContext.Subject.FindAsync(import.SubjectId);

    var metaStream = await _blobStorageService.StreamBlob(PrivateReleaseFiles, import.MetaFile.Path());
    var metaTable = DataTableUtils.CreateFromStream(metaStream);
    _importerService.ImportMeta(metaTable, subject, statisticsDbContext);
    await statisticsDbContext.SaveChangesAsync();

    await _fileImportService.ImportFiltersAndLocations(import.Id, statisticsDbContext);
    await statisticsDbContext.SaveChangesAsync();
}
// Splits the import's data file into batches when it exceeds the configured
// rows-per-batch limit; otherwise only logs that no split was required.
public async Task SplitDataFile(Guid importId)
{
    var import = await _dataImportService.GetImport(importId);
    var stream = await _blobStorageService.StreamBlob(PrivateReleaseFiles, import.File.Path());
    var table = DataTableUtils.CreateFromStream(stream);

    if (table.Rows.Count <= import.RowsPerBatch)
    {
        _logger.LogInformation($"No splitting of datafile: {import.File.Filename} was necessary");
        return;
    }

    _logger.LogInformation($"Splitting Datafile: {import.File.Filename}");
    await SplitFiles(import, table);
    _logger.LogInformation($"Split of Datafile: {import.File.Filename} complete");
}
/// <summary>
/// Saves the given records as a csv file under the predefined CacheData path.
/// </summary>
/// <param name="path">Target file path.</param>
/// <param name="data">Records to persist; logs an error and returns when null.</param>
/// <param name="appendMode">True to append to the file tail, false to overwrite.</param>
public virtual void saveToLocalCsv(string path, IList<T> data, bool appendMode = false)
{
    if (data == null)
    {
        log.Error("没有任何内容可以保存到csv!");
        return;
    }
    var table = DataTableUtils.ToDataTable(data);
    try
    {
        var mode = File.Exists(path) ? "覆盖" : "新增";
        CsvFileUtils.WriteToCsvFile(path, table, appendMode);
        log.Debug("文件已{0}:{1}. 共{2}行数据.", mode, path, data.Count);
    }
    catch (Exception e)
    {
        log.Error(e, "保存到本地csv文件失败!({0})", path);
    }
}
// Splits the subject's data blob into batches when it exceeds the message's
// rows-per-batch limit; otherwise only logs that no split was required.
public async Task SplitDataFile(
    ImportMessage message,
    SubjectData subjectData)
{
    await using var stream = await _fileStorageService.StreamBlob(subjectData.DataBlob);
    var table = DataTableUtils.CreateFromStream(stream);

    if (table.Rows.Count <= message.RowsPerBatch)
    {
        _logger.LogInformation($"No splitting of datafile: {message.DataFileName} was necessary");
        return;
    }

    _logger.LogInformation($"Splitting Datafile: {message.DataFileName}");
    await SplitFiles(message, subjectData, table);
    _logger.LogInformation($"Split of Datafile: {message.DataFileName} complete");
}
/// <summary>
/// Exports the average test results to a single Excel workbook: one group of
/// tables per Harmony Search generator type, plus an A* comparison table.
/// </summary>
/// <param name="testResults">The Harmony Search test results.</param>
/// <param name="aStarTestResults">The A* test results.</param>
/// <param name="commonInfoDataTable">The common information data table.</param>
/// <param name="outputDirectory">The output directory.</param>
private void ExportAverageTestResults(List<HarmonySearchAverageTestResult> testResults, AverageTestResult aStarTestResults, DataTable commonInfoDataTable, string outputDirectory)
{
    // Group Harmony Search results by generator type.
    var grouped = TestResultUtils.GroupTestResults(testResults);

    // Build one data table per result group, keyed by type name.
    var tablesByType = new Dictionary<string, List<DataTable>>();
    foreach (var (typeName, resultGroups) in grouped)
    {
        var tables = new List<DataTable>();
        foreach (var (key, results) in resultGroups)
        {
            tables.Add(DataTableUtils.GetHarmonySearchTestResultsDataTable(key, results, typeName));
        }
        tablesByType[typeName] = tables;
    }

    // Compare the RandomDirectedStop generator against the A* baseline.
    var comparison = testResults
        .Where(r => r.HarmonyGeneratorType == HarmonyGeneratorType.RandomDirectedStop)
        .Cast<AverageTestResult>()
        .ToList();
    comparison.Add(aStarTestResults);
    var comparisonTable = DataTableUtils.GetTestResultsDataTable("Comparison", comparison);
    tablesByType["AStar"] = new List<DataTable> { comparisonTable };

    // Export everything to a single workbook.
    var filePath = Path.Combine(outputDirectory, "AverageTestResults");
    _excelExportService.ExportToExcel(commonInfoDataTable, tablesByType, filePath);
}
// Streams the import's data and meta blobs into tables and delegates the
// filter/location import to the importer service.
public async Task ImportFiltersAndLocations(Guid importId, StatisticsDbContext context)
{
    var import = await _dataImportService.GetImport(importId);
    var subject = await context.Subject.FindAsync(import.SubjectId);

    var dataStream = await _blobStorageService.StreamBlob(PrivateReleaseFiles, import.File.Path());
    var dataTable = DataTableUtils.CreateFromStream(dataStream);

    var metaStream = await _blobStorageService.StreamBlob(PrivateReleaseFiles, import.MetaFile.Path());
    var metaTable = DataTableUtils.CreateFromStream(metaStream);

    await _importerService.ImportFiltersAndLocations(
        import,
        dataTable.Columns,
        dataTable.Rows,
        _importerService.GetMeta(metaTable, subject, context),
        context);
}
// Computes historical volatility series for a set of soybean-meal futures
// contracts over two lookback periods and writes the result to historicalVol.csv.
// FIX: replaced 16 hand-written computeVol calls with a contract loop (same call
// order: period1 then period2 per contract), removed the unused overwrite/new-file
// flag, and the empty catch now surfaces write failures instead of hiding them.
public void compute()
{
    List<HistoricalVol> volList = new List<HistoricalVol>();
    List<DateTime> tradeDays = DateUtils.GetTradeDays(today.AddDays(-360), today.AddDays(-1));
    for (int i = 0; i < tradeDays.Count(); i++)
    {
        HistoricalVol vol = new HistoricalVol();
        vol.time = tradeDays[i];
        volList.Add(vol);
    }

    string[] contracts =
    {
        "M1705.DCE", "M1707.DCE", "M1708.DCE", "M1709.DCE",
        "M1711.DCE", "M1712.DCE", "M1801.DCE", "M1803.DCE"
    };
    foreach (string contract in contracts)
    {
        computeVol(contract, period1, ref volList);
        computeVol(contract, period2, ref volList);
    }

    var dt = DataTableUtils.ToDataTable(volList);
    string path = "historicalVol.csv";
    try
    {
        CsvFileUtils.WriteToCsvFile(path, dt);
    }
    catch (Exception e)
    {
        // Best-effort export (as before), but at least surface the failure.
        Console.WriteLine("Failed to write {0}: {1}", path, e.Message);
    }
}
/// <summary>
/// Loads the NC csv file into csvDataTable, applies the configured row filter,
/// and resets the stopwatch.
/// FIX: the old code opened a FileStream/StreamReader it never read from or
/// disposed (leaking the file handle) — removed; the missing-file error now
/// throws FileNotFoundException (still caught by any existing catch (Exception)).
/// </summary>
/// <param name="szNCFilename">Path of the csv file to load.</param>
public void Init(string szNCFilename)
{
    if (!File.Exists(szNCFilename))
    {
        throw new FileNotFoundException("File Does not exist", szNCFilename);
    }
    csvDataTable = csvExToDataTable(szNCFilename, false);
    System.Diagnostics.Debug.Print(DataTableUtils.DataTable2CSV(csvDataTable, ",", true));
    // Keep only the rows matching the configured filter expression.
    var filteredRows = csvDataTable.Select(filter);
    csvDataTable = filteredRows.CopyToDataTable<DataRow>();
    oStopWatch.Reset();
}