/// <summary>
/// Imports a SHP file into a UDB datasource.
/// </summary>
/// <param name="importPath">Full path of the source .shp file.</param>
/// <param name="info">Connection info of the target datasource.</param>
/// <returns>An error message when the import fails; otherwise null (null is also returned on exception, after showing a message box).</returns>
public string ImportShp(string importPath, DatasourceConnectionInfo info)
{
    try
    {
        // Path.GetFileName handles both separator kinds and paths without any
        // separator, unlike the previous Substring(LastIndexOf("\\") + 1) logic.
        string name = System.IO.Path.GetFileName(importPath);

        // Build the SHP import setting: overwrite any existing dataset of the
        // same name and target the datasource described by `info`.
        ImportSettingSHP importSettingSHP = new ImportSettingSHP();
        importSettingSHP.ImportMode = ImportMode.Overwrite;
        importSettingSHP.SourceFilePath = importPath;
        importSettingSHP.TargetDatasourceConnectionInfo = info;

        // Run the import through a DataImport instance.
        DataImport import1 = new DataImport();
        ImportSettings settings = import1.ImportSettings;
        settings.Add(importSettingSHP);
        ImportResult dd = import1.Run();
        i++; // progress counter maintained by the enclosing class
        if (dd.FailedSettings.Length != 0)
            return "【shp数据导入】" + name + "导入失败!请检查数据是否含有有效记录。\t\n";
        return null;
    }
    catch (Exception ex)
    {
        // Best-effort: report the error and signal "no message" to the caller.
        MessageBox.Show(ex.Message);
        return null;
    }
}
/// <summary>
/// Imports the default static data (BBCode extensions, file extensions and
/// spam words) into every board.
/// </summary>
private void ImportStatics()
{
    // Resolve a virtual path and, when the file exists, hand its underlying
    // stream to the supplied import action.
    void LoadIfExists(string file, Action<Stream> streamAction)
    {
        var fullFile = this.Get<HttpRequestBase>().MapPath(file);

        if (!File.Exists(fullFile))
        {
            return;
        }

        // import into board...
        using (var reader = new StreamReader(fullFile))
        {
            streamAction(reader.BaseStream);
            reader.Close();
        }
    }

    // Upgrade all Boards
    foreach (var board in this.GetRepository<Board>().ListTyped())
    {
        this.Get<IRaiseEvent>().Raise(new ImportStaticDataEvent(board.ID));

        // load default bbcode if available...
        LoadIfExists(BbcodeImport, s => DataImport.BBCodeExtensionImport(board.ID, s));

        // load default extensions if available...
        LoadIfExists(FileImport, s => DataImport.FileExtensionImport(board.ID, s));

        // load default spam word if available...
        LoadIfExists(SpamWordsImport, s => DataImport.SpamWordsImport(board.ID, s));
    }
}
/// <summary>
/// Builds the history-file import result used as the command argument.
/// Returns null when no valid history file is selected, no driver matches,
/// or no current data file is set.
/// </summary>
public object GetArgumentValue()
{
    var historyFile = txtHistroyFile.Text;
    if (string.IsNullOrEmpty(historyFile) || !File.Exists(historyFile))
    {
        return null;
    }

    IDataImportDriver driver = DataImport.GetDriver(_productIdentify, _subProductIdentify, historyFile, null);
    if (driver == null)
    {
        return null;
    }

    if (string.IsNullOrEmpty(_currentFileName))
    {
        return null;
    }

    string error = null;
    using (RasterDataProvider provider = GeoDataDriver.Open(_currentFileName) as RasterDataProvider)
    {
        return driver.Do(_productIdentify, _subProductIdentify, provider, historyFile, out error);
    }
}
/// <summary>
/// Try to Import from selected File
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="System.EventArgs"/> instance containing the event data.</param>
protected void Import_OnClick([NotNull] object sender, [NotNull] EventArgs e)
{
    var postedFile = this.importFile.PostedFile;

    // import selected file (if it's the proper format)...
    if (!postedFile.ContentType.StartsWith("text"))
    {
        this.PageContext.AddLoadMessage(
            this.GetTextFormatted("IMPORT_FAILED", postedFile.ContentType),
            MessageTypes.danger);

        this.PageContext.PageElements.RegisterJsBlockStartup(
            "openModalJs",
            JavaScriptBlocks.OpenModalJs("ImportDialog"));
        return;
    }

    try
    {
        var importedCount = DataImport.BBCodeExtensionImport(
            this.PageContext.PageBoardID,
            postedFile.InputStream);

        // Success with a count, or a "nothing imported" warning.
        var message = importedCount > 0
            ? this.GetTextFormatted("IMPORT_SUCESS", importedCount)
            : this.GetText("ADMIN_BBCODE_IMPORT", "IMPORT_NOTHING");
        var messageType = importedCount > 0 ? MessageTypes.success : MessageTypes.warning;

        this.PageContext.AddLoadMessage(message, messageType);
    }
    catch (Exception x)
    {
        this.PageContext.AddLoadMessage(
            string.Format(this.GetText("ADMIN_BBCODE_IMPORT", "IMPORT_FAILED"), x.Message),
            MessageTypes.danger);

        this.PageContext.PageElements.RegisterJsBlockStartup(
            "openModalJs",
            JavaScriptBlocks.OpenModalJs("ImportDialog"));
    }
}
/// <summary>
/// Main entry point: wires up the importer and the file-watcher timer, then
/// blocks the main thread indefinitely (until an exception escapes).
/// </summary>
/// <param name="args">Command-line arguments (unused).</param>
static void Main(string[] args)
{
    try
    {
        Console.WriteLine("Starting ..");

        Importer = new DataImport();
        Importer.OnBeginImport += OnBeginDataImport;
        Importer.OnEndImport += OnEndDataImport;

        // Create the timer disarmed, then arm it for a single tick in 1s;
        // the callback is expected to re-arm it as needed.
        FileWatcherTimer = new Timer(OnFileWatcherTimer, null, Timeout.Infinite, Timeout.Infinite);
        FileWatcherTimer.Change(1000, Timeout.Infinite);

        // Keep-alive task: loops forever, waking once per second.
        Task keepAlive = new Task(() =>
        {
            try
            {
                while (true)
                {
                    Task.Delay(1000).Wait();
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.StackTrace);
            }
        });
        keepAlive.Start();
        keepAlive.Wait();
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.StackTrace);
    }
}
/// <summary>
/// Imports a DWG file (as CAD) into the template datasource and displays the
/// resulting "Polyline" dataset in the map control.
/// </summary>
public void ImportToDwg()
{
    // Open the template workspace that hosts the target datasource.
    this.fileWorkspace = new SuperMap.Data.Workspace();
    WorkspaceConnectionInfo conInfo = new WorkspaceConnectionInfo(@"..\..\template\temp.smwu");
    fileWorkspace.Open(conInfo);

    dataImport = new DataImport();
    importDatasource = fileWorkspace.Datasources["temp"];

    try
    {
        dataImport.ImportSettings.Clear();

        // Append the DWG from imgPath into the template datasource, keeping
        // the CAD structure intact.
        ImportSettingDWG dwgSetting = new ImportSettingDWG();
        dwgSetting.ImportMode = ImportMode.Append;
        dwgSetting.SourceFilePath = imgPath;
        dwgSetting.TargetDatasource = importDatasource;
        dwgSetting.ImportingAsCAD = true;
        dataImport.ImportSettings.Add(dwgSetting);
        dataImport.Run();

        // Show the imported "Polyline" dataset on the map and zoom to it.
        DatasetVector importResult = importDatasource.Datasets["Polyline"] as DatasetVector;
        this.mapControl1.Map.Workspace = fileWorkspace;
        mapControl1.Map.Layers.Clear();
        mapControl1.Map.Layers.Add(importResult, true);
        mapControl1.Map.ViewEntire();
        mapControl1.Map.Refresh();
    }
    catch (Exception ex)
    {
        System.Diagnostics.Trace.WriteLine(ex.Message);
    }
}
/// <summary>
/// Extracts the data and metadata CSVs from the import's zip blob and
/// uploads both to private release-file storage. The data file is opened
/// twice: one stream for row counting, one for the upload itself.
/// </summary>
/// <param name="import">The import whose zip, data and meta file paths are used.</param>
public async Task ExtractDataFiles(DataImport import)
{
    var path = import.ZipFile.Path();

    await using var zipBlobFileStream = await _blobStorageService.StreamBlob(PrivateReleaseFiles, path);
    using var archive = new ZipArchive(zipBlobFileStream);

    var firstEntry = archive.Entries[0];
    var secondEntry = archive.Entries[1];

    // Whichever entry carries ".meta." in its name is the metadata file.
    ZipArchiveEntry dataFile;
    ZipArchiveEntry metadataFile;
    if (firstEntry.Name.Contains(".meta."))
    {
        metadataFile = firstEntry;
        dataFile = secondEntry;
    }
    else
    {
        dataFile = firstEntry;
        metadataFile = secondEntry;
    }

    await using (var rowStream = dataFile.Open())
    await using (var stream = dataFile.Open())
    {
        await _blobStorageService.UploadStream(
            containerName: PrivateReleaseFiles,
            path: import.File.Path(),
            stream: stream,
            contentType: "text/csv",
            metadata: GetDataFileMetaValues(
                metaFileName: metadataFile.Name,
                numberOfRows: CalculateNumberOfRows(rowStream)
            ));
    }

    await using (var stream = metadataFile.Open())
    {
        await _blobStorageService.UploadStream(
            containerName: PrivateReleaseFiles,
            path: import.MetaFile.Path(),
            stream: stream,
            contentType: "text/csv");
    }
}
/// <summary>
/// Synchronizes the model function's parameter list with the imported
/// function definition: adds missing parameters, refreshes existing ones
/// and removes parameters that no longer exist in the import.
/// </summary>
public static void PopulateParameters(nHydrate.Dsl.nHydrateModel model, DataImport.Function function, Function newFunction)
{
    foreach (var importedParam in function.ParameterList)
    {
        // Match by case-insensitive name.
        var modelParam = newFunction.Parameters.FirstOrDefault(x => x.Name.ToLower() == importedParam.Name.ToLower());
        if (modelParam == null)
        {
            modelParam = new nHydrate.Dsl.FunctionParameter(model.Partition);
            modelParam.Name = importedParam.Name;
            modelParam.SortOrder = importedParam.SortOrder;

            // Correct for invalid identifiers.
            if (!nHydrate.Dsl.ValidationHelper.ValidCodeIdentifier(modelParam.Name))
            {
                modelParam.CodeFacade = nHydrate.Dsl.ValidationHelper.MakeCodeIdentifer(modelParam.Name, string.Empty);
            }

            newFunction.Parameters.Add(modelParam);
        }

        // Always refresh the mutable attributes from the import.
        modelParam.Length = importedParam.Length;
        modelParam.Nullable = importedParam.Nullable;
        modelParam.DataType = (DataTypeConstants)Enum.Parse(typeof(DataTypeConstants), importedParam.DataType.ToString());
        modelParam.Default = importedParam.DefaultValue;
        modelParam.Scale = importedParam.Scale;
    }

    // Drop parameters absent from the imported definition.
    var importedNames = function.ParameterList.Select(a => a.Name.ToLower()).ToList();
    newFunction.Parameters.Remove(x => !importedNames.Contains(x.Name.ToLower()));
}
/// <summary>
/// GetAllIncompleteImports should return every import regardless of stage,
/// ordered by creation timestamp descending (import2 newest, then import1,
/// then import3).
/// </summary>
public async Task GetIncompleteImports()
{
    // Arrange: one release with three data files, each with an import in a
    // different stage and with a different Created timestamp.
    var release = new Release
    {
        Slug = "test-release",
        Publication = new Publication
        {
            Title = "Test Publication"
        },
        TimePeriodCoverage = CalendarYear,
        ReleaseName = "2000"
    };

    var releaseFile1 = new ReleaseFile
    {
        File = new File
        {
            Filename = "file1.csv",
            Type = FileType.Data
        },
        Release = release
    };

    var releaseFile2 = new ReleaseFile
    {
        File = new File
        {
            Filename = "file2.csv",
            Type = FileType.Data
        },
        Release = release
    };

    var releaseFile3 = new ReleaseFile
    {
        File = new File
        {
            Filename = "file3.csv",
            Type = FileType.Data
        },
        Release = release
    };

    var import1 = new DataImport
    {
        File = releaseFile1.File,
        NumBatches = 1,
        Rows = 100,
        StagePercentageComplete = 99,
        Status = FAILED,
        SubjectId = Guid.NewGuid(),
        Created = DateTime.UtcNow.AddHours(-1)
    };

    var import2 = new DataImport
    {
        File = releaseFile2.File,
        NumBatches = 2,
        Rows = 200,
        StagePercentageComplete = 54,
        Status = STAGE_1,
        SubjectId = Guid.NewGuid(),
        Created = DateTime.UtcNow
    };

    var import3 = new DataImport
    {
        File = releaseFile3.File,
        NumBatches = 3,
        Rows = 300,
        StagePercentageComplete = 76,
        Status = STAGE_4,
        SubjectId = Guid.NewGuid(),
        Created = DateTime.UtcNow.AddDays(-1)
    };

    var contextId = Guid.NewGuid().ToString();

    await using (var contentDbContext = InMemoryApplicationDbContext(contextId))
    {
        await contentDbContext.Releases.AddAsync(release);
        await contentDbContext.ReleaseFiles.AddRangeAsync(releaseFile1, releaseFile2, releaseFile3);
        await contentDbContext.DataImports.AddRangeAsync(import1, import2, import3);
        await contentDbContext.SaveChangesAsync();
    }

    await using (var contentDbContext = InMemoryApplicationDbContext(contextId))
    {
        // Act
        var importStatusBauService = BuildImportStatusBauService(contentDbContext: contentDbContext);
        var result = await importStatusBauService.GetAllIncompleteImports();

        // Assert
        Assert.True(result.IsRight);

        var imports = result.Right;
        Assert.Equal(3, imports.Count);

        // Assert they are in descending timestamp order - import2, import1, import3
        Assert.Equal(import2.File.Id, imports[0].FileId);
        Assert.Null(imports[0].SubjectTitle);
        Assert.Equal(import2.SubjectId, imports[0].SubjectId);
        Assert.Equal(release.Publication.Id, imports[0].PublicationId);
        Assert.Equal(release.Publication.Title, imports[0].PublicationTitle);
        Assert.Equal(release.Id, imports[0].ReleaseId);
        Assert.Equal(release.Title, imports[0].ReleaseTitle);
        Assert.Equal(import2.File.Filename, imports[0].DataFileName);
        Assert.Equal(import2.Rows, imports[0].Rows);
        Assert.Equal(import2.NumBatches, imports[0].Batches);
        Assert.Equal(import2.Status, imports[0].Status);
        Assert.Equal(import2.StagePercentageComplete, imports[0].StagePercentageComplete);
        Assert.Equal(import2.PercentageComplete(), imports[0].PercentageComplete);

        Assert.Equal(import1.File.Id, imports[1].FileId);
        Assert.Null(imports[1].SubjectTitle);
        Assert.Equal(import1.SubjectId, imports[1].SubjectId);
        Assert.Equal(release.Publication.Id, imports[1].PublicationId);
        Assert.Equal(release.Publication.Title, imports[1].PublicationTitle);
        Assert.Equal(release.Id, imports[1].ReleaseId);
        Assert.Equal(release.Title, imports[1].ReleaseTitle);
        Assert.Equal(import1.File.Filename, imports[1].DataFileName);
        Assert.Equal(import1.Rows, imports[1].Rows);
        Assert.Equal(import1.NumBatches, imports[1].Batches);
        Assert.Equal(import1.Status, imports[1].Status);
        Assert.Equal(import1.StagePercentageComplete, imports[1].StagePercentageComplete);
        Assert.Equal(import1.PercentageComplete(), imports[1].PercentageComplete);

        Assert.Equal(import3.File.Id, imports[2].FileId);
        Assert.Null(imports[2].SubjectTitle);
        Assert.Equal(import3.SubjectId, imports[2].SubjectId);
        Assert.Equal(release.Publication.Id, imports[2].PublicationId);
        Assert.Equal(release.Publication.Title, imports[2].PublicationTitle);
        Assert.Equal(release.Id, imports[2].ReleaseId);
        Assert.Equal(release.Title, imports[2].ReleaseTitle);
        Assert.Equal(import3.File.Filename, imports[2].DataFileName);
        Assert.Equal(import3.Rows, imports[2].Rows);
        Assert.Equal(import3.NumBatches, imports[2].Batches);
        // BUG FIX: the Status assertion for imports[2] was missing, while it
        // was present for imports[0] and imports[1].
        Assert.Equal(import3.Status, imports[2].Status);
        Assert.Equal(import3.StagePercentageComplete, imports[2].StagePercentageComplete);
        Assert.Equal(import3.PercentageComplete(), imports[2].PercentageComplete);
    }
}
/// <summary>
/// Opens (or creates) the default datasource in the workspace tree, lets the
/// user pick a TIFF image and imports it into that datasource.
/// </summary>
public override void Run()
{
    try
    {
        WorkspaceControl workspaceControl =
            SuperMap.Desktop.Application.ActiveApplication.MainForm.DockBarManager[typeof(WorkspaceControlManager)].Control as WorkspaceControl;
        Datasource targetDatasource;
        DatasourceConnectionInfo dataSourceConnectionInfo = new DatasourceConnectionInfo();
        dataSourceConnectionInfo.Server = @"..\..\Default";

        if (workspaceControl != null)
        {
            workspaceControl.ResourcesNodeVisible = false;
            workspaceControl.LayoutsNodeVisible = false;
            Form1 form1 = new Form1();
            form1.Show();

            // Prefer an already-open datasource; otherwise try to open it from
            // disk, and fall back to creating a brand-new one on any failure.
            try
            {
                bool isExist =
                    workspaceControl.WorkspaceTree.Workspace.Datasources.Contains(dataSourceConnectionInfo.Alias);
                if (isExist)
                {
                    targetDatasource = workspaceControl.WorkspaceTree.Workspace.Datasources[dataSourceConnectionInfo.Alias];
                }
                else
                {
                    targetDatasource = workspaceControl.WorkspaceTree.Workspace.Datasources.Open(dataSourceConnectionInfo);
                }
            }
            catch (Exception)
            {
                // Opening failed (e.g. the datasource file does not exist yet).
                targetDatasource = workspaceControl.WorkspaceTree.Workspace.Datasources.Create(dataSourceConnectionInfo);
            }

            OpenFileDialog ofd = new OpenFileDialog();
            // BUG FIX: the filter label promised TIFF files but the pattern was
            // "*.*", allowing any file; the pattern now matches the label.
            ofd.Filter = "影像文件(*.tif)|*.tif";
            ofd.ValidateNames = true;
            ofd.CheckPathExists = true;
            ofd.CheckFileExists = true;
            ofd.ShowHelp = true;

            if (ofd.ShowDialog() == DialogResult.OK)
            {
                string strFileName = ofd.FileName;

                DataImport m_dataImport = new DataImport();
                m_dataImport.ImportSettings.Clear();

                // Configure the TIFF import into the resolved datasource.
                ImportSettingTIF importSetingTif = new ImportSettingTIF();
                importSetingTif.ImportMode = ImportMode.Overwrite;
                importSetingTif.SourceFilePath = strFileName;
                // NOTE(review): the results of the next two calls are unused;
                // kept in case the getters have side effects - confirm and drop.
                PrjCoordSys prgCoord = importSetingTif.GetSourcePrjCoordSys();
                Charset pCharset = importSetingTif.SourceFileCharset;
                ImportDataInfos importDataInfoTif = importSetingTif.GetTargetDataInfos("");
                importSetingTif.SetTargetDataInfos(importDataInfoTif);
                importSetingTif.TargetDatasource = targetDatasource;
                m_dataImport.ImportSettings.Add(importSetingTif);
                m_dataImport.Run();
            }
        }
    }
    catch (Exception ex)
    {
        SuperMap.Desktop.Application.ActiveApplication.Output.Output(ex.StackTrace);
    }
}
/// <summary>
/// Creates the view model for the given import dialog and wires its commands.
/// </summary>
/// <param name="owner">The owning <see cref="DataImport"/> dialog.</param>
public DataImportViewModel(DataImport owner)
{
    _owner = owner;

    // Each command simply delegates to the matching handler method.
    this.SelectFileCommand = new DelegateCommand(this.OnSelectFile);
    this.StartImportCommand = new DelegateCommand(this.OnStartImport);
}
// Shared template path (set by callers before importing).
public string mo;

/// <summary>
/// Initializes the import tool against the given workspace.
/// </summary>
/// <param name="workspace">Workspace that receives imported data.</param>
public ImportTool(SuperMap.Data.Workspace workspace)
{
    m_workspace = workspace;
    m_dataImport = new DataImport();
}
/// <summary>
/// Verifies that the CSV import yields at least one book.
/// </summary>
public void CanLoadBooks()
{
    List<Book> books = DataImport.ImportCSV();

    Assert.IsNotEmpty(books);
}
/// <summary>
/// Imports a CSV file as an attribute table into the given datasource.
/// </summary>
/// <param name="targetName">Name of the target dataset.</param>
/// <param name="importPath">Full path of the source CSV file.</param>
/// <param name="info">Connection info of the target datasource.</param>
/// <returns>An error message when the import fails; otherwise null (null is also returned on exception, after showing a message box).</returns>
public string ImportCSV(string targetName, string importPath, DatasourceConnectionInfo info)
{
    try
    {
        var csvSetting = new ImportSettingCSV();
        csvSetting.ImportMode = ImportMode.Overwrite;  // overwrite existing dataset
        csvSetting.FirstRowIsField = true;             // first row holds field names
        csvSetting.SourceFilePath = importPath;
        csvSetting.TargetDatasourceConnectionInfo = info;
        csvSetting.TargetDatasetName = targetName;

        var importer = new DataImport();
        importer.ImportSettings.Add(csvSetting);
        ImportResult result = importer.Run();

        if (result.FailedSettings.Length != 0)
            return "【属性表导入】" + targetName + "导入失败!请检查数据是否含有有效记录。\t\n";
        return null;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        return null;
    }
}
/// <summary>
/// Imports a TIFF file into a UDB datasource as a grid dataset, targeting
/// the sphere-Mercator projection.
/// </summary>
/// <param name="importPath">Full path of the source .tif file.</param>
/// <param name="info">Connection info of the target datasource.</param>
/// <returns>An error message when the import fails; otherwise null (null is also returned on exception, after showing a message box).</returns>
public string ImportTIFF(string importPath, DatasourceConnectionInfo info)
{
    try
    {
        // Display name: everything after the last backslash.
        string name = importPath.Substring(importPath.LastIndexOf("\\") + 1);

        // Configure the TIFF import: overwrite mode, grid dataset form.
        ImportSettingTIF tifSetting = new ImportSettingTIF();
        tifSetting.ImportMode = ImportMode.Overwrite;
        tifSetting.SourceFilePath = importPath;
        tifSetting.TargetDatasourceConnectionInfo = info;
        tifSetting.ImportingAsGrid = true;

        // Target projection: sphere Mercator. Note a later projection
        // conversion is still needed by downstream code.
        PrjCoordSys mercator = new PrjCoordSys();
        mercator.Type = PrjCoordSysType.SphereMercator;
        tifSetting.TargetPrjCoordSys = mercator;

        DataImport importer = new DataImport();
        ImportSettings settings = importer.ImportSettings;
        settings.Add(tifSetting);
        ImportResult result = importer.Run();
        i++; // progress counter maintained by the enclosing class

        if (result.FailedSettings.Length != 0)
            return "【tif数据导入】" + name + "导入失败!请检查数据是否有效。\t\n";
        return null;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        return null;
    }
}
// Happy path: two complete data subjects are returned with names, file info,
// guidance content, time periods and geographic levels populated.
public async Task ListSubjects()
{
    // Arrange: two subjects on the same statistics release.
    var statisticsRelease = new Data.Model.Release();

    var releaseSubject1 = new ReleaseSubject
    {
        Release = statisticsRelease,
        Subject = new Subject(),
        DataGuidance = "Guidance 1"
    };

    var releaseSubject2 = new ReleaseSubject
    {
        Release = statisticsRelease,
        Subject = new Subject(),
        DataGuidance = "Guidance 2"
    };

    var statisticsDbContextId = Guid.NewGuid().ToString();

    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        await statisticsDbContext.AddRangeAsync(releaseSubject1, releaseSubject2);
        await statisticsDbContext.SaveChangesAsync();
    }

    // Content release shares the statistics release's id.
    var contentRelease = new Release
    {
        Id = statisticsRelease.Id,
    };

    var releaseFile1 = new ReleaseFile
    {
        Release = contentRelease,
        Name = "Subject 1",
        File = new File
        {
            Filename = "data1.csv",
            Type = FileType.Data,
            SubjectId = releaseSubject1.Subject.Id
        },
    };

    var releaseFile2 = new ReleaseFile
    {
        Release = contentRelease,
        Name = "Subject 2",
        File = new File
        {
            Filename = "data2.csv",
            Type = FileType.Data,
            SubjectId = releaseSubject2.Subject.Id,
        }
    };

    // Both imports complete, so both subjects should be listed.
    var import1 = new DataImport
    {
        File = releaseFile1.File,
        Status = DataImportStatus.COMPLETE
    };

    var import2 = new DataImport
    {
        File = releaseFile2.File,
        Status = DataImportStatus.COMPLETE
    };

    var contentDbContextId = Guid.NewGuid().ToString();

    await using (var contentDbContext = InMemoryContentDbContext(contentDbContextId))
    {
        await contentDbContext.AddRangeAsync(releaseFile1, releaseFile2);
        await contentDbContext.AddRangeAsync(import1, import2);
        await contentDbContext.SaveChangesAsync();
    }

    await using (var contentDbContext = InMemoryContentDbContext(contentDbContextId))
    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        // Mock time periods, geographic levels and blob info per subject.
        var dataGuidanceSubjectService = new Mock<IDataGuidanceSubjectService>();
        var timePeriodService = new Mock<ITimePeriodService>();

        timePeriodService
            .Setup(s => s.GetTimePeriodLabels(releaseSubject1.SubjectId))
            .Returns(new TimePeriodLabels("2020/21", "2021/22"));
        timePeriodService
            .Setup(s => s.GetTimePeriodLabels(releaseSubject2.SubjectId))
            .Returns(new TimePeriodLabels("2030", "2031"));
        dataGuidanceSubjectService
            .Setup(s => s.GetGeographicLevels(releaseSubject1.SubjectId))
            .ReturnsAsync(ListOf("Local Authority", "Local Authority District"));
        dataGuidanceSubjectService
            .Setup(s => s.GetGeographicLevels(releaseSubject2.SubjectId))
            .ReturnsAsync(ListOf("National"));

        var fileInfoGetter = new Mock<IReleaseService.IBlobInfoGetter>(MockBehavior.Strict);
        fileInfoGetter
            .Setup(
                s => s.Get(
                    It.Is<ReleaseFile>(rf => rf.Id == releaseFile1.Id)
                )
            )
            .ReturnsAsync(
                new BlobInfo(
                    path: releaseFile1.Path(),
                    size: "1 Mb",
                    contentType: "text/csv",
                    contentLength: 0L
                )
            );
        fileInfoGetter
            .Setup(
                s => s.Get(
                    It.Is<ReleaseFile>(rf => rf.Id == releaseFile2.Id)
                )
            )
            .ReturnsAsync(
                new BlobInfo(
                    path: releaseFile2.Path(),
                    size: "2 Mb",
                    contentType: "text/csv",
                    contentLength: 0L
                )
            );

        var service = BuildReleaseService(
            contentDbContext: contentDbContext,
            statisticsDbContext: statisticsDbContext,
            dataGuidanceSubjectService: dataGuidanceSubjectService.Object,
            timePeriodService: timePeriodService.Object,
            fileSizeGetter: fileInfoGetter.Object
        );

        // Act
        var result = await service.ListSubjects(contentRelease.Id);

        MockUtils.VerifyAllMocks(dataGuidanceSubjectService, fileInfoGetter);

        // Assert: both subjects present, fully populated.
        var subjects = result.AssertRight();
        Assert.NotNull(subjects);
        Assert.Equal(2, subjects.Count);

        Assert.Equal(releaseSubject1.Subject.Id, subjects[0].Id);
        Assert.Equal(releaseFile1.Name, subjects[0].Name);
        Assert.Equal(releaseFile1.File.Id, subjects[0].File.Id);
        Assert.Equal(releaseFile1.File.Filename, subjects[0].File.FileName);
        Assert.Equal("1 Mb", subjects[0].File.Size);
        Assert.Equal("csv", subjects[0].File.Extension);
        Assert.Equal(releaseSubject1.DataGuidance, subjects[0].Content);
        Assert.Equal("2020/21", subjects[0].TimePeriods.From);
        Assert.Equal("2021/22", subjects[0].TimePeriods.To);
        Assert.Equal(2, subjects[0].GeographicLevels.Count);
        Assert.Equal("Local Authority", subjects[0].GeographicLevels[0]);
        Assert.Equal("Local Authority District", subjects[0].GeographicLevels[1]);

        Assert.Equal(releaseSubject2.Subject.Id, subjects[1].Id);
        Assert.Equal(releaseFile2.Name, subjects[1].Name);
        Assert.Equal(releaseFile2.File.Id, subjects[1].File.Id);
        Assert.Equal(releaseFile2.File.Filename, subjects[1].File.FileName);
        Assert.Equal("2 Mb", subjects[1].File.Size);
        Assert.Equal(releaseSubject2.DataGuidance, subjects[1].Content);
        Assert.Equal("csv", subjects[1].File.Extension);
        Assert.Equal("2030", subjects[1].TimePeriods.From);
        Assert.Equal("2031", subjects[1].TimePeriods.To);
        Assert.Single(subjects[1].GeographicLevels);
        Assert.Equal("National", subjects[1].GeographicLevels[0]);
    }
}
// Subjects whose data import has not completed (STAGE_1 here) must be
// filtered out of the listing; only the COMPLETE subject is returned.
public async Task ListSubjects_FiltersImportingSubjects()
{
    // Arrange: two subjects on the same statistics release.
    var statisticsRelease = new Data.Model.Release();

    var releaseSubject1 = new ReleaseSubject
    {
        Release = statisticsRelease,
        Subject = new Subject
        {
            Id = Guid.NewGuid()
        }
    };

    var releaseSubject2 = new ReleaseSubject
    {
        Release = statisticsRelease,
        Subject = new Subject
        {
            Id = Guid.NewGuid(),
        }
    };

    var statisticsDbContextId = Guid.NewGuid().ToString();

    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        await statisticsDbContext.AddRangeAsync(releaseSubject1, releaseSubject2);
        await statisticsDbContext.SaveChangesAsync();
    }

    var contentRelease = new Release
    {
        Id = statisticsRelease.Id,
    };

    var releaseFile1 = new ReleaseFile
    {
        Name = "Data 1",
        Release = contentRelease,
        File = new File
        {
            Filename = "data1.csv",
            Type = FileType.Data,
            SubjectId = releaseSubject1.Subject.Id
        }
    };

    var releaseFile2 = new ReleaseFile
    {
        Name = "Data 2",
        Release = contentRelease,
        File = new File
        {
            Filename = "data2.csv",
            Type = FileType.Data,
            SubjectId = releaseSubject2.Subject.Id,
        }
    };

    // import1 is still in progress; import2 has completed.
    var import1 = new DataImport
    {
        File = releaseFile1.File,
        Status = DataImportStatus.STAGE_1
    };

    var import2 = new DataImport
    {
        File = releaseFile2.File,
        Status = DataImportStatus.COMPLETE
    };

    var contentDbContextId = Guid.NewGuid().ToString();

    await using (var contentDbContext = InMemoryContentDbContext(contentDbContextId))
    {
        await contentDbContext.AddRangeAsync(releaseFile1, releaseFile2);
        await contentDbContext.AddRangeAsync(import1, import2);
        await contentDbContext.SaveChangesAsync();
    }

    await using (var contentDbContext = InMemoryContentDbContext(contentDbContextId))
    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        var service = BuildReleaseService(
            contentDbContext: contentDbContext,
            statisticsDbContext: statisticsDbContext
        );

        // Act
        var result = await service.ListSubjects(contentRelease.Id);

        // Assert: only the completed subject remains.
        var subjects = result.AssertRight();
        Assert.Single(subjects);
        Assert.Equal(releaseSubject2.Subject.Id, subjects[0].Id);
        Assert.Equal(releaseFile2.Name, subjects[0].Name);
    }
}
// A replacement file that has not yet superseded its original (original still
// linked via ReplacedBy/Replacing) must not appear in the subject listing.
public async Task ListSubjects_FiltersPendingReplacementSubjects()
{
    // Arrange: three subjects - one plain, one replaced, one replacement.
    var statisticsRelease = new Data.Model.Release();

    var releaseSubject1 = new ReleaseSubject
    {
        Release = statisticsRelease,
        Subject = new Subject(),
    };

    var releaseSubject2 = new ReleaseSubject
    {
        Release = statisticsRelease,
        Subject = new Subject(),
    };

    var releaseSubject2Replacement = new ReleaseSubject
    {
        Release = statisticsRelease,
        Subject = new Subject(),
    };

    var statisticsDbContextId = Guid.NewGuid().ToString();

    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        await statisticsDbContext.AddRangeAsync(releaseSubject1, releaseSubject2, releaseSubject2Replacement);
        await statisticsDbContext.SaveChangesAsync();
    }

    var contentRelease = new Release
    {
        Id = statisticsRelease.Id,
    };

    var releaseFile1 = new ReleaseFile
    {
        Release = contentRelease,
        Name = "Subject 1",
        File = new File
        {
            Filename = "data1.csv",
            Type = FileType.Data,
            SubjectId = releaseSubject1.Subject.Id
        }
    };

    var file2 = new File
    {
        Filename = "data2.csv",
        Type = FileType.Data,
        SubjectId = releaseSubject2.Subject.Id,
    };

    // Replacement file linked both ways (Replacing / ReplacedBy) - this marks
    // the replacement as still pending.
    var file2Replacement = new File
    {
        Filename = "data2_replacement.csv",
        Type = FileType.Data,
        SubjectId = releaseSubject2Replacement.Subject.Id,
        Replacing = file2
    };

    file2.ReplacedBy = file2Replacement;

    var releaseFile2 = new ReleaseFile
    {
        Release = contentRelease,
        Name = "Subject 2",
        File = file2
    };

    var releaseFile2Replacement = new ReleaseFile
    {
        Release = contentRelease,
        Name = "Subject 2 Replacement",
        File = file2Replacement
    };

    // NOTE: no import exists for the replacement file - only the two originals.
    var import1 = new DataImport
    {
        File = releaseFile1.File,
        Status = DataImportStatus.COMPLETE
    };

    var import2 = new DataImport
    {
        File = releaseFile2.File,
        Status = DataImportStatus.COMPLETE
    };

    var contentDbContextId = Guid.NewGuid().ToString();

    await using (var contentDbContext = InMemoryContentDbContext(contentDbContextId))
    {
        await contentDbContext.AddRangeAsync(releaseFile1, releaseFile2, releaseFile2Replacement);
        await contentDbContext.AddRangeAsync(import1, import2);
        await contentDbContext.SaveChangesAsync();
    }

    await using (var contentDbContext = InMemoryContentDbContext(contentDbContextId))
    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        var service = BuildReleaseService(
            contentDbContext: contentDbContext,
            statisticsDbContext: statisticsDbContext
        );

        // Act
        var result = await service.ListSubjects(contentRelease.Id);

        // Assert: the pending replacement is excluded.
        var subjects = result.AssertRight();
        Assert.NotNull(subjects);
        Assert.Equal(2, subjects.Count);
        Assert.Equal(releaseSubject1.Subject.Id, subjects[0].Id);
        Assert.Equal(releaseFile1.Name, subjects[0].Name);
        Assert.Equal(releaseSubject2.Subject.Id, subjects[1].Id);
        Assert.Equal(releaseFile2.Name, subjects[1].Name);
    }
}
// Smoke test: parse the sample XML file into a LanData container.
static void test3()
{
    var importer = new DataImport();
    importer.ImportXml("凯宾斯基.xml", new LanData());
}
/// <summary>
/// Synchronizes the model stored procedure's field list with the imported
/// definition: adds missing fields, refreshes existing ones and removes
/// fields that no longer exist. Skips procs whose column load failed.
/// </summary>
public static void PopulateFields(nHydrate.Dsl.nHydrateModel model, DataImport.StoredProc storedProc, StoredProcedure newStoredProc)
{
    if (storedProc.ColumnFailure)
        return;

    foreach (var importedField in storedProc.FieldList)
    {
        // Match by case-insensitive name.
        var modelField = newStoredProc.Fields.FirstOrDefault(x => x.Name.ToLower() == importedField.Name.ToLower());
        if (modelField == null)
        {
            modelField = new nHydrate.Dsl.StoredProcedureField(model.Partition);
            modelField.Name = importedField.Name;
            newStoredProc.Fields.Add(modelField);

            // Correct for invalid identifiers.
            if (!nHydrate.Dsl.ValidationHelper.ValidCodeIdentifier(modelField.Name))
            {
                modelField.CodeFacade = nHydrate.Dsl.ValidationHelper.MakeCodeIdentifer(modelField.Name, string.Empty);
            }
        }

        // Always refresh the mutable attributes from the import.
        modelField.Length = importedField.Length;
        modelField.Nullable = importedField.Nullable;
        modelField.DataType = (DataTypeConstants)Enum.Parse(typeof(DataTypeConstants), importedField.DataType.ToString());
        modelField.Default = importedField.DefaultValue;
        modelField.Scale = importedField.Scale;
    }

    // Drop fields absent from the imported definition.
    var importedNames = storedProc.FieldList.Select(a => a.Name.ToLower()).ToList();
    newStoredProc.Fields.Remove(x => !importedNames.Contains(x.Name.ToLower()));
}
/// <summary>
/// Locates candidate import files in the default import directory for the
/// active monitoring product/sub-product.
/// </summary>
private ImportFilesObj[] AutoFindFile(IRasterDataProvider dataProvider)
{
    var product = _monitoringSession.ActiveMonitoringProduct;
    var subProduct = _monitoringSession.ActiveMonitoringSubProduct;
    return DataImport.AutoFindFiles(product.Identify, subProduct.Identify, dataProvider, _defaultImpotFileDir, null);
}
/// <summary>
/// Builds a DataImport whose import table mirrors the grid's columns.
/// The text after the "bx" marker in each column name becomes the import
/// column name, keyed by the grid column's header text.
/// </summary>
public DataImport BuildDataImpoter(DataGridView dgvCellTable)
{
    DataImport import = new DataImport();

    var headerMap = new Dictionary<string, string>();
    var importTable = new DataTable();

    for (int col = 0; col < dgvCellTable.ColumnCount; col++)
    {
        var gridColumn = dgvCellTable.Columns[col];
        string columnName = gridColumn.Name;
        // Strip everything up to and including the "bx" marker.
        string importName = columnName.Substring(columnName.IndexOf("bx") + 2);
        headerMap.Add(gridColumn.HeaderText, importName);
        importTable.Columns.Add(importName);
    }

    import.ColHeaderMap = headerMap;
    import.ImportTable = importTable;
    return import;
}
/// <summary>
/// Debug handler: exercises the MVG parser driver lookup for a sample file.
/// The returned driver is not used further - this is a smoke test only.
/// (The previously declared `error` local was never used and was removed.)
/// </summary>
private void btnMVGXMLParser_Click(object sender, EventArgs e)
{
    // NOTE(review): hard-coded absolute path; only works on the dev machine.
    IDataImportDriver driver = DataImport.GetDriver(
        "FIR",
        "PLST",
        @"D:\MAS_Workspace\OutputItem\Fire\MultiValueGraph\20130311\FIR_DBLV_NOAA18_AVHRR_1000M_NUL_P001_20120407061200.mvg",
        null);
}
/// <summary>
/// Opens the data-import dialog pre-configured with the service grid's
/// columns (header text mapped to column name).
/// </summary>
private void m_ImportToolStripMenuItem_Click(object sender, EventArgs e)
{
    this.import = new DataImport();
    this.import.importDataEvent += new EventHandler<ImportDataArgs>(this.import_importDataEvent);

    // Mirror the grid columns into the table the dialog fills in.
    Dictionary<string, string> dictionary = new Dictionary<string, string>();
    DataTable table = new DataTable();
    for (int i = 0; i < this.m_GridCellService.ColumnCount; i++)
    {
        dictionary.Add(this.m_GridCellService.Columns[i].HeaderText, this.m_GridCellService.Columns[i].Name);
        table.Columns.Add(this.m_GridCellService.Columns[i].Name);
    }

    this.import.ColHeaderMap = dictionary;
    this.import.ImportTable = table;

    // The dialog result is not acted upon; the original empty
    // `if (... == DialogResult.Cancel) { }` branch was removed.
    this.import.ShowDialog();
}
/// <summary>
/// Test routine: creates a workspace/datasource at <paramref name="sourceUDB"/>,
/// imports a TIFF as a grid dataset, then re-projects it to CGCS 2000.
/// </summary>
public void ImportTIFFTest(string importPath, string sourceUDB)
{
    try
    {
        // 1. Build the datasource connection and a fresh workspace around it.
        DatasourceConnectionInfo info = new DatasourceConnectionInfo();
        info.Server = sourceUDB;
        WorkspaceConnectionInfo wsConnInfo = new WorkspaceConnectionInfo(sourceUDB);
        SuperMap.Data.Workspace workspace = new SuperMap.Data.Workspace();
        workspace.Create(wsConnInfo);
        Datasources datasources = workspace.Datasources;
        Datasource targetDatasource = datasources.Create(info);

        // 2. Configure the TIFF import: overwrite mode, grid dataset form.
        ImportSettingTIF tifSetting = new ImportSettingTIF();
        tifSetting.ImportMode = ImportMode.Overwrite;
        tifSetting.SourceFilePath = importPath;
        tifSetting.TargetDatasourceConnectionInfo = info;
        tifSetting.ImportingAsGrid = true;

        // 3. Target projection: sphere Mercator (converted again below).
        PrjCoordSys mercator = new PrjCoordSys();
        mercator.Type = PrjCoordSysType.SphereMercator;
        tifSetting.TargetPrjCoordSys = mercator;

        // 4. Run the import.
        DataImport importer = new DataImport();
        importer.ImportSettings.Add(tifSetting);
        importer.Run();

        try
        {
            // Derive the dataset name from the file name (without extension).
            int sepIndex = importPath.LastIndexOf('\\');
            string fileName = importPath.Substring(sepIndex + 1);
            int dotIndex = fileName.LastIndexOf('.');
            string datasetName = fileName.Substring(0, dotIndex);
            Dataset importedDataset = targetDatasource.Datasets[datasetName];

            // Re-project the imported dataset to CGCS 2000.
            PrjCoordSys cgcs2000 = new PrjCoordSys();
            cgcs2000.FromXML(readXML(@"G:\移动风险监测\参考坐标\CGCS_2000.xml"));
            Boolean converted = CoordSysTranslator.Convert(importedDataset, cgcs2000, new CoordSysTransParameter(), CoordSysTransMethod.GeocentricTranslation);
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message);
        }
        i++; // progress counter maintained by the enclosing class
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Builds product information: loads stock-futures and index symbols from CSV files
/// and opens a DDE link for each set, stopping at the first failed link.
/// </summary>
private void CreateProductDdeData()
{
    // Load stock-futures symbols from CSV.
    // NOTE: the original named this local "DataImport", shadowing its own type name.
    var dataImport = new DataImport();
    m_dicSymbolStock = dataImport.ImportStockCSV("./StockInfo.csv");

    // Load index symbols from CSV.
    m_dicSymbolIndex = dataImport.ImportIndexCSV("./IndexInfo.csv");

    // Open the DDE links: stock futures first, then indices; abort on first failure.
    if (!CreateDdeLink(m_dicSymbolStock, true))
        return;
    if (!CreateDdeLink(m_dicSymbolIndex, false))
        return;

    // (The original also set the local to null here; a local about to go out of
    // scope does not need that, so it was removed.)
}
public async Task ListFeaturedTables()
{
    var releaseId = Guid.NewGuid();
    var release = new Release { Id = releaseId };

    // Two subjects attached to the same statistics release.
    var releaseSubject1 = new ReleaseSubject
    {
        Release = new Data.Model.Release { Id = releaseId },
        Subject = new Subject { Id = Guid.NewGuid() }
    };
    var releaseSubject2 = new ReleaseSubject
    {
        Release = new Data.Model.Release { Id = releaseId },
        Subject = new Subject { Id = Guid.NewGuid() }
    };

    // One data file per subject on the content side.
    var releaseFile1 = new ReleaseFile
    {
        Name = "Data 1",
        Release = release,
        File = new File
        {
            Filename = "data1.csv",
            Type = FileType.Data,
            SubjectId = releaseSubject1.Subject.Id
        }
    };
    var releaseFile2 = new ReleaseFile
    {
        Name = "Data 2",
        Release = release,
        File = new File
        {
            Filename = "data2.csv",
            Type = FileType.Data,
            SubjectId = releaseSubject2.Subject.Id
        }
    };

    // Both imports completed, so both data files are eligible.
    var import1 = new DataImport
    {
        File = releaseFile1.File,
        Status = DataImportStatus.COMPLETE
    };
    var import2 = new DataImport
    {
        File = releaseFile2.File,
        Status = DataImportStatus.COMPLETE
    };

    // Two highlighted data blocks. NOTE(review): both queries reference
    // releaseSubject1's subject id — presumably intentional, since only
    // releaseSubject1 is saved to the statistics context below; confirm.
    var dataBlock1 = new DataBlock
    {
        Name = "Test data block 1",
        HighlightName = "Test highlight name 1",
        HighlightDescription = "Test highlight description 1",
        Query = new ObservationQueryContext
        {
            SubjectId = releaseSubject1.Subject.Id,
        }
    };
    var dataBlock2 = new DataBlock
    {
        Name = "Test data block 2",
        HighlightName = "Test highlight name 2",
        HighlightDescription = "Test highlight description 2",
        Query = new ObservationQueryContext
        {
            SubjectId = releaseSubject1.Subject.Id,
        }
    };

    var contentDbContextId = Guid.NewGuid().ToString();
    var statisticsDbContextId = Guid.NewGuid().ToString();

    // Seed the content context, then dispose it so the service reads persisted data.
    await using (var contentDbContext = InMemoryContentDbContext(contentDbContextId))
    {
        await contentDbContext.AddAsync(release);
        await contentDbContext.AddRangeAsync(releaseFile1, releaseFile2);
        await contentDbContext.AddRangeAsync(import1, import2);
        // Order is reversed — checks below verify the service re-orders the result.
        await contentDbContext.AddRangeAsync(
            new ReleaseContentBlock
            {
                Release = release,
                ContentBlock = dataBlock2
            },
            new ReleaseContentBlock
            {
                Release = release,
                ContentBlock = dataBlock1
            }
        );
        await contentDbContext.SaveChangesAsync();
    }

    // Seed the statistics context with the matching subject.
    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        await statisticsDbContext.AddAsync(releaseSubject1);
        await statisticsDbContext.SaveChangesAsync();
    }

    await using (var contentDbContext = InMemoryContentDbContext(contentDbContextId))
    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        var service = BuildReleaseService(
            contentDbContext: contentDbContext,
            statisticsDbContext: statisticsDbContext
        );

        var result = await service.ListFeaturedTables(release.Id);

        var featuredTables = result.AssertRight();

        // Both highlighted data blocks come back, in name order (block 1 before
        // block 2) despite being inserted in reverse.
        Assert.Equal(2, featuredTables.Count);
        Assert.Equal(dataBlock1.Id, featuredTables[0].Id);
        Assert.Equal(dataBlock1.HighlightName, featuredTables[0].Name);
        Assert.Equal(dataBlock1.HighlightDescription, featuredTables[0].Description);
        Assert.Equal(dataBlock2.Id, featuredTables[1].Id);
        Assert.Equal(dataBlock2.HighlightName, featuredTables[1].Name);
        Assert.Equal(dataBlock2.HighlightDescription, featuredTables[1].Description);
    }
}
/// <summary>
/// Synchronizes the fields of <paramref name="targetItem"/> with the imported entity:
/// updates or creates a field for every imported field (matched by name,
/// case-insensitively) and removes target fields absent from the import.
/// </summary>
/// <param name="model">Model supplying the partition for newly created fields.</param>
/// <param name="module">Optional module to attach each field to; may be null.</param>
/// <param name="importItem">Imported entity whose field list is authoritative.</param>
/// <param name="targetItem">Entity being populated in place.</param>
public static void PopulateFields(nHydrate.Dsl.nHydrateModel model, Module module, DataImport.Entity importItem, Entity targetItem)
{
    foreach (var field in importItem.FieldList)
    {
        // Match by name, case-insensitively. Ordinal comparison: these are
        // identifiers, not prose (avoids culture-sensitive ToLower pitfalls).
        var newField = targetItem.Fields.FirstOrDefault(x => string.Equals(x.Name, field.Name, StringComparison.OrdinalIgnoreCase));
        if (newField == null)
            newField = new nHydrate.Dsl.Field(model.Partition);

        // Add module if necessary
        if (module != null && !newField.Modules.Contains(module))
        {
            newField.Modules.Add(module);
        }

        if (!targetItem.Fields.Contains(newField))
            targetItem.Fields.Add(newField);

        // Copy the imported field's properties onto the target field.
        newField.SortOrder = field.SortOrder;
        newField.Name = field.Name;
        newField.DataType = (DataTypeConstants)Enum.Parse(typeof(DataTypeConstants), field.DataType.ToString());
        newField.Length = field.Length;
        newField.Nullable = field.Nullable;
        newField.IsCalculated = field.IsComputed;
        newField.Default = field.DefaultValue;
        newField.IsUnique = field.IsUnique;
        newField.Formula = field.Formula;
        newField.Identity = (field.Identity ? IdentityTypeConstants.Database : IdentityTypeConstants.None);
        newField.IsPrimaryKey = field.PrimaryKey;
        newField.Scale = field.Scale;
        newField.ImportedDefaultName = field.ImportedDefaultName;
        //DO NOT IMPORT METADATA

        // Correct for invalid identifiers by generating a code facade.
        if (!nHydrate.Dsl.ValidationHelper.ValidCodeIdentifier(newField.Name))
        {
            newField.CodeFacade = nHydrate.Dsl.ValidationHelper.MakeCodeIdentifer(newField.Name, string.Empty);
        }
    }

    // Remove target fields not present in the import. The imported names are
    // materialised into a HashSet once — the original re-built and linearly
    // scanned the name list for every target field (O(n²)).
    var importedNames = new HashSet<string>(importItem.FieldList.Select(y => y.Name), StringComparer.OrdinalIgnoreCase);
    targetItem.Fields.Remove(x => !importedNames.Contains(x.Name));
}
/// <summary>
/// Handles the site import menu item: builds the import dialog for the current
/// site node, shows it, and remembers the directory of the chosen file.
/// </summary>
private void importToolStripMenuItem_Click(object sender, EventArgs e)
{
    var importDialog = this.m_Editor.BuildSiteImportFrom(this.m_SiteNode);
    this.m_DateImport = importDialog;

    // Start the file picker in the directory used last time, when one is known.
    if (!string.IsNullOrEmpty(this.m_SiteFilePath))
    {
        importDialog.pfd.InitialDirectory = this.m_SiteFilePath;
    }

    importDialog.importDataEvent += this.import_importDataEvent;

    // Unless the user cancelled, remember where the chosen file lives.
    if (importDialog.ShowDialog() != DialogResult.Cancel)
    {
        this.m_SiteFilePath = Path.GetDirectoryName(importDialog.pfd.FileName);
    }
}
public async Task ListFeaturedTables_FiltersNonMatchingSubjects()
{
    var releaseId = Guid.NewGuid();
    var release = new Release { Id = releaseId };

    // One subject attached to the statistics release.
    var releaseSubject1 = new ReleaseSubject
    {
        Release = new Data.Model.Release { Id = releaseId },
        Subject = new Subject { Id = Guid.NewGuid() }
    };

    // Matching data file, with a completed import.
    var releaseFile1 = new ReleaseFile
    {
        Name = "Data 1",
        Release = release,
        File = new File
        {
            Filename = "data1.csv",
            Type = FileType.Data,
            SubjectId = releaseSubject1.Subject.Id
        }
    };
    var import1 = new DataImport
    {
        File = releaseFile1.File,
        Status = DataImportStatus.COMPLETE
    };

    // Subject does not match — the data block queries a subject id that is not
    // attached to the release, so it should be filtered out of the result.
    var dataBlock1 = new DataBlock
    {
        Name = "Test data block",
        HighlightName = "Test highlight name",
        HighlightDescription = "Test highlight description",
        Query = new ObservationQueryContext
        {
            SubjectId = Guid.NewGuid(),
        }
    };

    var contentDbContextId = Guid.NewGuid().ToString();
    var statisticsDbContextId = Guid.NewGuid().ToString();

    // Seed the content context, then dispose it so the service reads persisted data.
    await using (var contentDbContext = InMemoryContentDbContext(contentDbContextId))
    {
        await contentDbContext.AddAsync(release);
        await contentDbContext.AddAsync(releaseFile1);
        await contentDbContext.AddAsync(import1);
        await contentDbContext.AddRangeAsync(
            new ReleaseContentBlock
            {
                Release = release,
                ContentBlock = dataBlock1
            }
        );
        await contentDbContext.SaveChangesAsync();
    }

    // Seed the statistics context with the release's only subject.
    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        await statisticsDbContext.AddAsync(releaseSubject1);
        await statisticsDbContext.SaveChangesAsync();
    }

    await using (var contentDbContext = InMemoryContentDbContext(contentDbContextId))
    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        var service = BuildReleaseService(
            contentDbContext: contentDbContext,
            statisticsDbContext: statisticsDbContext
        );

        var result = await service.ListFeaturedTables(release.Id);

        var featuredTables = result.AssertRight();

        // The non-matching data block was filtered out, leaving nothing.
        Assert.Empty(featuredTables);
    }
}
/// <summary>
/// When every import for the target release has completed, HasIncompleteImports
/// returns false — even if a different release has an incomplete import.
/// </summary>
public async Task HasIncompleteImports_ReleaseHasCompletedImports()
{
    var targetRelease = new Release();
    var otherRelease = new Release();

    var targetDataFile1 = new File { Type = FileType.Data };
    var targetDataFile2 = new File { Type = FileType.Data };
    var otherDataFile = new File { Type = FileType.Data };

    // Both imports belonging to the release under test have finished.
    var completedImport1 = new DataImport
    {
        File = targetDataFile1,
        Status = DataImportStatus.COMPLETE
    };
    var completedImport2 = new DataImport
    {
        File = targetDataFile2,
        Status = DataImportStatus.COMPLETE
    };

    // Incomplete imports for other Releases should be ignored
    var unrelatedImport = new DataImport
    {
        File = otherDataFile,
        Status = DataImportStatus.STAGE_1
    };

    var contextId = Guid.NewGuid().ToString();

    // Seed, then dispose so the service reads persisted state.
    await using (var contentDbContext = InMemoryApplicationDbContext(contextId))
    {
        await contentDbContext.ReleaseFiles.AddRangeAsync(
            new ReleaseFile { Release = targetRelease, File = targetDataFile1 },
            new ReleaseFile { Release = targetRelease, File = targetDataFile2 },
            new ReleaseFile { Release = otherRelease, File = otherDataFile });
        await contentDbContext.DataImports.AddRangeAsync(
            completedImport1, completedImport2, unrelatedImport);
        await contentDbContext.SaveChangesAsync();
    }

    await using (var contentDbContext = InMemoryApplicationDbContext(contextId))
    {
        var service = BuildDataImportService(contentDbContext: contentDbContext);

        var result = await service.HasIncompleteImports(targetRelease.Id);

        Assert.False(result);
    }
}
public async Task CheckComplete_LastBatchFileCompleted_HasErrors()
{
    var file = new File
    {
        Id = Guid.NewGuid(),
        Filename = "my_data_file.csv"
    };

    // Import in its final stage with a non-empty error list. Even though the
    // observation count below matches TotalRows, the presence of errors should
    // drive the final status to FAILED.
    var import = new DataImport
    {
        Id = Guid.NewGuid(),
        Errors = new List<DataImportError>
        {
            new DataImportError("an error")
        },
        FileId = file.Id,
        File = file,
        SubjectId = Guid.NewGuid(),
        Status = STAGE_4,
        NumBatches = 2,
        TotalRows = 2
    };

    var batchService = new Mock<IBatchService>(Strict);
    var dataImportService = new Mock<IDataImportService>(Strict);

    // No batch files left to process — the import is on its last batch.
    batchService
        .Setup(s => s.GetNumBatchesRemaining(import.File))
        .ReturnsAsync(0);

    dataImportService
        .Setup(s => s.GetImport(import.Id))
        .ReturnsAsync(import);

    // Expected outcome: status moves to FAILED at 100% complete.
    dataImportService
        .Setup(s => s.UpdateStatus(
            import.Id,
            FAILED,
            100))
        .Returns(Task.CompletedTask);

    var statisticsDbContextId = Guid.NewGuid().ToString();

    // Seed exactly TotalRows observations so the row-count check itself passes.
    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        await statisticsDbContext.Observation.AddRangeAsync(
            new Observation
            {
                SubjectId = import.SubjectId
            },
            new Observation
            {
                SubjectId = import.SubjectId
            });
        await statisticsDbContext.SaveChangesAsync();
    }

    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        var service = BuildFileImportService(batchService: batchService.Object,
            dataImportService: dataImportService.Object);

        var message = new ImportObservationsMessage
        {
            Id = import.Id
        };

        await service.CheckComplete(message, statisticsDbContext);
    }

    // Strict mocks: verifies UpdateStatus(FAILED) was the only status transition.
    MockUtils.VerifyAllMocks(batchService, dataImportService);
}
public async Task CheckComplete_LastBatchFileCompleted_HasIncorrectObservationCount()
{
    var file = new File
    {
        Id = Guid.NewGuid(),
        Filename = "my_data_file.csv"
    };

    // Import in its final stage with no errors, but expecting 3 rows while only
    // 2 observations are inserted below — the mismatch should fail the import.
    var import = new DataImport
    {
        Id = Guid.NewGuid(),
        Errors = new List<DataImportError>(),
        FileId = file.Id,
        File = file,
        SubjectId = Guid.NewGuid(),
        Status = STAGE_4,
        NumBatches = 2,
        TotalRows = 3
    };

    var batchService = new Mock<IBatchService>(Strict);
    var dataImportService = new Mock<IDataImportService>(Strict);

    // No batch files left to process — the import is on its last batch.
    batchService
        .Setup(s => s.GetNumBatchesRemaining(import.File))
        .ReturnsAsync(0);

    dataImportService
        .Setup(s => s.GetImport(import.Id))
        .ReturnsAsync(import);

    // Expected outcome: FailImport is called with the row-count-mismatch message.
    dataImportService
        .Setup(s => s.FailImport(import.Id,
            $"Number of observations inserted (2) does not equal that expected ({import.TotalRows}) : Please delete & retry"))
        .Returns(Task.CompletedTask);

    var statisticsDbContextId = Guid.NewGuid().ToString();

    // Seed only 2 observations against an expected TotalRows of 3.
    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        await statisticsDbContext.Observation.AddRangeAsync(
            new Observation
            {
                SubjectId = import.SubjectId
            },
            new Observation
            {
                SubjectId = import.SubjectId
            });
        await statisticsDbContext.SaveChangesAsync();
    }

    await using (var statisticsDbContext = InMemoryStatisticsDbContext(statisticsDbContextId))
    {
        var service = BuildFileImportService(batchService: batchService.Object,
            dataImportService: dataImportService.Object);

        var message = new ImportObservationsMessage
        {
            Id = import.Id
        };

        await service.CheckComplete(message, statisticsDbContext);
    }

    // Strict mocks: verifies FailImport was called and nothing unexpected happened.
    MockUtils.VerifyAllMocks(batchService, dataImportService);
}
/// <summary>
/// Splits an imported data file into batch CSV files and uploads each batch to blob
/// storage. Batches already present in storage are skipped, and the split aborts
/// early if the import finishes or starts aborting mid-way.
/// </summary>
/// <param name="dataImport">The import being processed; supplies batch size, file paths and id.</param>
/// <param name="dataFileTable">The full data file contents, one DataRow per CSV data row.</param>
private async Task SplitFiles(
    DataImport dataImport,
    DataTable dataFileTable)
{
    var colValues = CsvUtil.GetColumnValues(dataFileTable.Columns);
    var batches = dataFileTable.Rows.OfType<DataRow>().Batch(dataImport.RowsPerBatch);
    var batchCount = 1;
    // +1 presumably accounts for the CSV header row — TODO confirm against metadata consumers.
    var numRows = dataFileTable.Rows.Count + 1;
    var numBatches = (int)Math.Ceiling((double)dataFileTable.Rows.Count / dataImport.RowsPerBatch);

    var existingBatchFiles = await _batchService.GetBatchFilesForDataFile(dataImport.File);

    // Materialise the existing batch numbers once. The original kept a deferred
    // LINQ query (wrapped in a pointless AsQueryable) that was re-enumerated — and
    // linearly scanned by Contains — on every loop iteration; a HashSet gives a
    // single pass over the blob listing and O(1) lookups.
    var existingBatchFileNumbers = existingBatchFiles
        .Select(blobInfo => GetBatchNumberFromBatchFileName(blobInfo.FileName))
        .ToHashSet();

    // TODO: EES-1608 - this flag keeps a track of whether any batch files have been generated to date.
    // It is used in a legacy check to determine whether or not to generate a "no rows" batch file.
    // EES-1608 will investigate what the circumstances are that could lead to a "no rows" batch file
    // situation, and whether this check can actually be entirely removed or not.
    var batchFilesExist = existingBatchFileNumbers.Count > 0;

    foreach (var batch in batches)
    {
        // Abort if the import was cancelled/completed elsewhere while splitting.
        var currentStatus = await _dataImportService.GetImportStatus(dataImport.Id);

        if (currentStatus.IsFinishedOrAborting())
        {
            _logger.LogInformation(
                $"Import for {dataImport.File.Filename} is finished or aborting - stopping creating batch files");
            return;
        }

        // Skip batches that were already uploaded by a previous (interrupted) run.
        if (existingBatchFileNumbers.Contains(batchCount))
        {
            _logger.LogInformation($"Batch {batchCount} already exists - not recreating");
            batchCount++;
            continue;
        }

        await using var stream = new MemoryStream();
        var writer = new StreamWriter(stream);
        await writer.FlushAsync();

        // Copy this batch's rows into a fresh table with the source's columns.
        var table = new DataTable();
        CopyColumns(dataFileTable, table);
        CopyRows(table, batch.ToList(), colValues, dataImport.HasSoleGeographicLevel());

        var percentageComplete = (double)batchCount / numBatches * 100;

        await _dataImportService.UpdateStatus(dataImport.Id,
            DataImportStatus.STAGE_3,
            percentageComplete);

        // If no lines then don't create a batch unless it's the last one & there are zero
        // lines in total in which case create a zero lines batch
        if (table.Rows.Count == 0 && (batchCount != numBatches || batchFilesExist))
        {
            _logger.LogInformation($"Skipping batch file for row count {table.Rows.Count} with batchCount {batchCount} and numBatches {numBatches} and batchFilesExist {batchFilesExist} and batch {batch.Count()}");
            batchCount++;
            continue;
        }

        WriteDataTableToStream(table, writer);
        await writer.FlushAsync();
        stream.Seek(0, SeekOrigin.Begin);

        await _blobStorageService.UploadStream(
            containerName: PrivateReleaseFiles,
            path: dataImport.File.BatchPath(batchCount),
            stream: stream,
            contentType: "text/csv",
            metadata: GetDataFileMetaValues(
                metaFileName: dataImport.MetaFile.Filename,
                numberOfRows: numRows
            ));

        batchFilesExist = true;
        batchCount++;
    }
}