public void Show(string path, EfkPkg efkpkg)
{
    // Opens the EfkPkg import dialog: records the package and its source path,
    // chooses a default import directory, and seeds the per-file import list
    // with every packaged file selected for import.
    title = MultiLanguageTextProvider.GetText("ImportEfkPkgTitle");
    EfkPkg = efkpkg;
    sourceFilePath = Utils.Misc.BackSlashToSlash(path);

    // Default target: FileViewer's current path, else the directory of the
    // currently open effect, else the process working directory.
    var fileViewer = (Dock.FileViewer)Manager.GetWindow(typeof(Dock.FileViewer));
    var effectPath = Core.Root.GetFullPath();
    if (fileViewer != null && !string.IsNullOrEmpty(fileViewer.CurrentPath))
    {
        targetDirPath = fileViewer.CurrentPath;
    }
    else if (!string.IsNullOrEmpty(effectPath))
    {
        targetDirPath = Path.GetDirectoryName(effectPath);
    }
    else
    {
        targetDirPath = Directory.GetCurrentDirectory();
    }
    targetDirPath = Utils.Misc.BackSlashToSlash(targetDirPath);
    targetDirPathValid = Directory.Exists(targetDirPath);

    // One ImportFile entry per packaged file, all enabled by default.
    foreach (var packagedFile in efkpkg.AllFiles)
    {
        var entry = new ImportFile();
        entry.DestinationName = packagedFile.RelativePath;
        entry.DoesImport = true;
        entry.ValidationPath(targetDirPath);
        importFiles.Add(packagedFile, entry);
    }

    Manager.AddControl(this);
}
private void ImportFile(ImportFile orig, ImportFile file)
{
    // Copies one file to the destination. Existing files are skipped unless
    // overwriting is enabled; ItemProcessed is raised only when creation succeeds.
    if (this.FileExists(file) && !ImportSettings.OverwriteFile)
    {
        var skipMessage = "File already exists and is skipped: " + file.OriginalFullName;
        System.Diagnostics.Debug.WriteLine(skipMessage);
        FlatListSharePointDestination.Log.Warn((object)skipMessage);
        return;
    }

    System.Diagnostics.Debug.WriteLine("START Processing " + file.OriginalFullName);
    FlatListSharePointDestination.Log.Info((object)("START Processing " + file.OriginalFullName));
    CreateFileResult result = this.DocumentLibraryRepository.CreateFile(file);
    // The name/source pair is recorded regardless of whether creation succeeded.
    this.ExistingFilenames.Add(new NameSourcePair() { Name = file.Name, Source = file.OriginalFullName });
    System.Diagnostics.Debug.WriteLine("End Processing " + file.OriginalFullName);
    if (result.Succeeded)
    {
        this.RaiseItemProcessed((ImportItem)orig, result.Location);
    }
}
protected void BtnUpload_Click(object sender, EventArgs e)
{
    // Handles the fee-file upload: accepts only .csv/.xlsx, saves the file under
    // ~/Upload/Fees/, then refreshes the fee data regardless of outcome.
    if (ImportFile.HasFile)
    {
        // Compute the extension once and compare case-insensitively
        // (the original called ToLower() twice, a culture-sensitive comparison).
        string fileExtension = System.IO.Path.GetExtension(ImportFile.FileName);
        bool isAllowed = fileExtension.Equals(".csv", StringComparison.OrdinalIgnoreCase)
                         || fileExtension.Equals(".xlsx", StringComparison.OrdinalIgnoreCase);
        if (!isAllowed)
        {
            lblMessage.ForeColor = System.Drawing.Color.Red;
            lblMessage.Text = "Only files with .csv and .xlsx extension are allowed";
        }
        else
        {
            // FileName is client-supplied: keep only the file-name component so
            // "..\" style input cannot escape the Upload/Fees directory.
            string safeFileName = System.IO.Path.GetFileName(ImportFile.FileName);
            ImportFile.SaveAs(Server.MapPath("~/Upload/Fees/" + safeFileName));
            lblMessage.ForeColor = System.Drawing.Color.Green;
            lblMessage.Text = "File uploaded successfully";
        }
    }
    else
    {
        lblMessage.ForeColor = System.Drawing.Color.Red;
        lblMessage.Text = "Please select a file";
    }
    // Always re-read the CSV data and refresh the fee grid, even on failure.
    ReadWriteCSVFile();
    GetFees();
}
public static Table ImportFromFile(HttpPostedFileBase httpPostedFileBase, IImportFileStructure importFileStructure)
{
    // Wrap the posted file and let the factory pick the importer matching the
    // declared file structure; return the parsed table.
    var importFile = new ImportFile(httpPostedFileBase);
    var factory = new FileImporterFactory(importFile, importFileStructure);
    return factory.GetDataTable();
}
// Verifies that file activation state is tracked per user: deactivating an
// imported file for one user must leave it activated for a different user.
public async Task Set_activation_should_set_state_on_eligible_files()
{
    const string testText = "File Activation test 6";
    Msg.Title(testText, "Set ImportFile::IsActivated");
    var ts = new TestSupport();
    var customerUid = Guid.NewGuid();
    var startDateTime = ts.FirstEventDate;
    var createProjectResponse = ExecutorTestFixture.CreateCustomerProject(customerUid.ToString(), testText, Boundaries.Boundary1);
    ts.ProjectUid = new Guid(createProjectResponse.Result.Id);
    var importFilename = TestFileResolver.File(TestFile.TestAlignment2);
    var fullFilePath = TestFileResolver.GetFullPath(importFilename);
    var fileResult = await ImportFiles(ts, ts.ProjectUid, customerUid, startDateTime, fullFilePath);
    // Deactivate the imported file for the current user; expects HTTP 200 with code 200.
    await DoActivationRequest(customerUid, ts.ProjectUid.ToString(), fileResult.ImportedFileDescriptor.ImportedFileUid, false, HttpStatusCode.OK, 200, "Success");
    //Confirm it's deactivated for this user
    var importFile = new ImportFile();
    var importFileList = await importFile.GetImportedFilesFromWebApi <ImportedFileDescriptorListResult>($"api/v6/importedfiles?projectUid={ts.ProjectUid}", customerUid);
    Assert.Single(importFileList.ImportedFileDescriptors);
    Assert.False(importFileList.ImportedFileDescriptors[0].IsActivated, "Should be deactivated for user 1");
    //and activated for another user (same query issued under a different JWT)
    importFileList = await importFile.GetImportedFilesFromWebApi <ImportedFileDescriptorListResult>($"api/v6/importedfiles?projectUid={ts.ProjectUid}", customerUid, RestClient.ANOTHER_JWT);
    Assert.Single(importFileList.ImportedFileDescriptors);
    Assert.True(importFileList.ImportedFileDescriptors[0].IsActivated, "Should be activated for user 2");
}
// Imports a surveyed-surface file (the descriptor rows use ImportedFileType 2)
// via POST, then updates the same file via PUT using the row with later
// FileCreated/FileUpdated timestamps, and verifies the web API reports exactly
// one imported file whose descriptor matches the updated one.
public async Task TestImportANewFileThenUpdateTheSurveyedSurfaceFile(string uriRoot)
{
    const string testText = "File Import SS test 3";
    Msg.Title(testText, "Create standard project then upload a new surveyed surface file. Then update surveyed surface file");
    var ts = new TestSupport();
    var customerUid = Guid.NewGuid();
    var startDateTime = ts.FirstEventDate;
    var createProjectResponse = ExecutorTestFixture.CreateCustomerProject(customerUid.ToString(), testText, Boundaries.Boundary1);
    ts.ProjectUid = new Guid(createProjectResponse.Result.Id);
    var importFile = new ImportFile(uriRoot);
    var importFilename = TestFileResolver.File(TestFile.TestDesignSurface1);
    // Row 1: the initial import. Row 2: same file with FileCreated/FileUpdated
    // moved +10 days — used by the PUT below (row index 2).
    var importFileArray = new[] {
        "| EventType | ProjectUid | CustomerUid | Name | ImportedFileType | FileCreatedUtc | FileUpdatedUtc | ImportedBy | SurveyedUtc | IsActivated | MinZoomLevel | MaxZoomLevel |",
        $"| ImportedFileDescriptor | {ts.ProjectUid} | {customerUid} | {TestFileResolver.GetFullPath(importFilename)} | 2 | {startDateTime} | {startDateTime.AddDays(5)} | [email protected] | {startDateTime} | true | 0 | 0 |",
        $"| ImportedFileDescriptor | {ts.ProjectUid} | {customerUid} | {TestFileResolver.GetFullPath(importFilename)} | 2 | {startDateTime.AddDays(10)} | {startDateTime.AddDays(10)} | [email protected] | {startDateTime} | true | 0 | 0 |" };
    var filesResult = await importFile.SendRequestToFileImportV6(ts, importFileArray, 1, new ImportOptions(HttpMethod.Post, new[] { $"filename={importFilename}" }));
    var expectedResult1 = importFile.ExpectedImportFileDescriptorSingleResult.ImportedFileDescriptor;
    ts.CompareTheActualImportFileWithExpected(filesResult.ImportedFileDescriptor, expectedResult1, true);
    _ = await importFile.SendRequestToFileImportV6(ts, importFileArray, 2, new ImportOptions(HttpMethod.Put, new[] { $"filename={importFilename}" }));
    var expectedResult2 = importFile.ExpectedImportFileDescriptorSingleResult.ImportedFileDescriptor;
    // The update must replace, not duplicate: exactly one file should remain.
    var importFileList = await importFile.GetImportedFilesFromWebApi <ImportedFileDescriptorListResult>($"api/v6/importedfiles?projectUid={ts.ProjectUid}", customerUid);
    Assert.True(importFileList.ImportedFileDescriptors.Count == 1, "Expected 1 imported files but got " + importFileList.ImportedFileDescriptors.Count);
    ts.CompareTheActualImportFileWithExpectedV6(importFileList.ImportedFileDescriptors[0], expectedResult2, true);
}
// Imports a design surface file, then queries TRex directly and verifies TRex
// reports exactly one design whose UID and name match the imported file.
public async Task ManualTRexTest_CreateImportedFile(string uriRoot)
{
    const string testText = "File Import Misc 2";
    Msg.Title(testText, "Create standard project then upload a new design surface file.");
    var ts = new TestSupport();
    var customerUid = Guid.NewGuid();
    var startDateTime = ts.FirstEventDate;
    var createProjectResponse = ExecutorTestFixture.CreateCustomerProject(customerUid.ToString(), testText, Boundaries.Boundary1);
    ts.ProjectUid = new Guid(createProjectResponse.Result.Id);
    var importFile = new ImportFile(uriRoot);
    var importFilename = TestFileResolver.File(TestFile.TestDesignSurface3_GoodContent);
    var fullFilePath = TestFileResolver.GetFullPath(importFilename);
    var importFileArray = new[] {
        "| EventType | ProjectUid | CustomerUid | Name | ImportedFileType | FileCreatedUtc | FileUpdatedUtc | ImportedBy | IsActivated | MinZoomLevel | MaxZoomLevel |",
        $"| ImportedFileDescriptor | {ts.ProjectUid} | {customerUid} | {fullFilePath} | 1 | {startDateTime} | {startDateTime.AddDays(5)} | [email protected] | true | 15 | 19 |" };
    // NOTE(review): the multipart filename below is hard-coded rather than derived
    // from importFilename — confirm this is intentional if the fixture file changes.
    var filesResult = await importFile.SendRequestToFileImportV6(ts, importFileArray, 1, new ImportOptions(HttpMethod.Post, new[] { "filename=TestDesignSurfaceTestDesignSurface3_GoodContent.TTM" }));
    Assert.NotNull(filesResult);
    Assert.Equal(0, filesResult.Code);
    Assert.Equal(ts.ProjectUid.ToString(), filesResult.ImportedFileDescriptor.ProjectUid);
    // Cross-check the import result against what TRex itself reports.
    var trexService = new TRex();
    var designsResult = await trexService.GetDesignsFromTrex(customerUid.ToString(), ts.ProjectUid.ToString());
    Assert.Equal(0, designsResult.Code);
    Assert.Single(designsResult.DesignFileDescriptors);
    Assert.Equal(filesResult.ImportedFileDescriptor.ImportedFileUid, designsResult.DesignFileDescriptors[0].DesignUid);
    Assert.Equal("TestDesignSurface3_GoodContent.TTM", designsResult.DesignFileDescriptors[0].Name);
}
public IHttpActionResult Post(string name, string content)
{
    // Saves the posted content as a file in the configured import directory.
    // Returns an Error result for missing inputs, InternalServerError when the
    // directory is unconfigured or the write fails, otherwise Ok.
    if (string.IsNullOrEmpty(name))
    {
        return Error("Invalid file name");
    }
    if (string.IsNullOrEmpty(content))
    {
        return Error("Invalid file content");
    }
    try
    {
        var directory = _configuration.GetConfigurationValue(ConfigurationKeys.ImportDirectory, "");
        if (string.IsNullOrEmpty(directory))
        {
            return InternalServerError("Import directory not set");
        }
        // 'name' is caller-supplied: keep only the file-name component so
        // "../" style input cannot escape the import directory.
        var safeName = Path.GetFileName(name);
        var file = ImportFile.Create(Path.Combine(directory, safeName), content);
        file.Save();
    }
    catch (Exception e)
    {
        // Log the full exception text — the message alone loses the stack trace.
        _logger.Error(e.ToString());
        // Fixed typo in the client-facing message ("File not create").
        return InternalServerError("File not created");
    }
    return Ok();
}
public CreateFileResult CreateFile(ImportFile file)
{
    // Creates the file in the document library and applies its metadata.
    // If anything fails after the file itself was created, the file is deleted
    // again so no half-imported item is left behind. The target location is
    // reported in the result either way.
    EnsureInitialized();
    using (var context = CreateContext())
    {
        var location = ApplicationUrl + m_serverRelativeListUrl + file.Parent.ServerRelativePath;
        var created = false;
        var ok = true;
        try
        {
            CreateFile(file, context);
            created = true;
            ApplyMetaData(file, context);
        }
        catch (Exception e)
        {
            ok = false;
            log.Error(e);
            if (created)
            {
                // Roll back the partially imported file.
                log.Info("removing " + location);
                DeleteFile(file, context);
            }
        }
        return new CreateFileResult { Succeeded = ok, Location = location };
    }
}
public CreateFileResult CreateFile(ImportFile file)
{
    // Creates the file in the document library; on failure after creation the
    // file is removed again. The computed target location is always returned.
    this.EnsureInitialized();
    using (ClientContext context = this.CreateContext())
    {
        string location = this.ApplicationUrl + this.m_serverRelativeListUrl + file.Parent.ServerRelativePath;
        bool fileCreated = false;
        bool succeeded = true;
        try
        {
            this.CreateFile(file, context);
            fileCreated = true;
            // NOTE(review): metadata application is disabled in this variant —
            // confirm that skipping ApplyMetaData here is intentional.
            //this.ApplyMetaData(file, context);
        }
        catch (Exception ex)
        {
            succeeded = false;
            DocumentLibraryRepository.log.Error((object)ex);
            if (fileCreated)
            {
                DocumentLibraryRepository.log.Info((object)("removing " + location));
                this.DeleteFile(file, context);
            }
        }
        return new CreateFileResult() { Succeeded = succeeded, Location = location };
    }
}
private void DeleteFile(ImportFile file, ClientContext context)
{
    // Resolve the file's server-relative URL under the target list, queue the
    // delete, and commit it to the server in one round trip.
    string url = string.Concat(this.m_serverRelativeListUrl, file.ServerRelativePath);
    var target = context.Web.GetFileByServerRelativeUrl(url);
    target.DeleteObject();
    context.ExecuteQuery();
}
private void MapMembers(ImportFile importFile, ListItem listItem)
{
    // Copies timestamps, author/editor, source path and custom metadata from the
    // import file onto the list item. Metadata keys with no matching list field
    // are logged and skipped.
    listItem["Created"] = (object)importFile.Created;
    listItem["Modified"] = (object)importFile.Modified;
    if (importFile.ModifiedBy != null)
    {
        listItem["Editor"] = (object)new FieldUserValue() { LookupId = importFile.ModifiedBy.Id };
    }
    if (importFile.CreatedBy != null)
    {
        listItem["Author"] = (object)new FieldUserValue() { LookupId = importFile.CreatedBy.Id };
    }
    listItem["_Source"] = (object)importFile.OriginalFullName;
    foreach (KeyValuePair<string, string> pair in importFile.MetaData)
    {
        if (this.m_availableFields.ContainsKey(pair.Key))
        {
            listItem[pair.Key] = (object)pair.Value;
        }
        else
        {
            DocumentLibraryRepository.log.Warn((object)("Could not import " + pair.Key + ". Field not found"));
        }
    }
}
// Imports a design surface, then PUTs the identical descriptor twice (same
// FileCreatedUtc/FileUpdatedUtc); the imported-file history must still hold a
// single entry — updates with unchanged file dates must not add history rows.
public async Task TestImportANewFileThenUpdateTheDesignSurfaceFile_SameFileDates(string uriRoot)
{
    const string testText = "File Import ds test 4";
    Msg.Title(testText, "Create standard project then upload a new design surface file. Then update design surface file however leave same FileDates");
    var ts = new TestSupport();
    var customerUid = Guid.NewGuid();
    var startDateTime = ts.FirstEventDate;
    var createProjectResponse = ExecutorTestFixture.CreateCustomerProject(customerUid.ToString(), testText, Boundaries.Boundary1);
    ts.ProjectUid = new Guid(createProjectResponse.Result.Id);
    var importFile = new ImportFile(uriRoot);
    var importFilename = TestFileResolver.File(TestFile.TestDesignSurface1);
    var fullPath = TestFileResolver.GetFullPath(importFilename);
    // Three identical descriptor rows: row 1 for the initial POST, row 2 reused
    // by both PUT requests below.
    var importFileArray = new[] {
        "| EventType | ProjectUid | CustomerUid | Name | ImportedFileType | FileCreatedUtc | FileUpdatedUtc | ImportedBy | IsActivated | MinZoomLevel | MaxZoomLevel |",
        $"| ImportedFileDescriptor | {ts.ProjectUid} | {customerUid} | {fullPath} | 1 | {startDateTime} | {startDateTime.AddDays(5)} | [email protected] | true | 15 | 19 |",
        $"| ImportedFileDescriptor | {ts.ProjectUid} | {customerUid} | {fullPath} | 1 | {startDateTime} | {startDateTime.AddDays(5)} | [email protected] | true | 15 | 19 |",
        $"| ImportedFileDescriptor | {ts.ProjectUid} | {customerUid} | {fullPath} | 1 | {startDateTime} | {startDateTime.AddDays(5)} | [email protected] | true | 15 | 19 |" };
    await importFile.SendRequestToFileImportV6(ts, importFileArray, 1, new ImportOptions(HttpMethod.Post, new[] { $"filename={importFilename}" }));
    await importFile.SendRequestToFileImportV6(ts, importFileArray, 2, new ImportOptions(HttpMethod.Put, new[] { $"filename={importFilename}" }));
    var filesResult3 = await importFile.SendRequestToFileImportV6(ts, importFileArray, 2, new ImportOptions(HttpMethod.Put, new[] { $"filename={importFilename}" }));
    Assert.Single(filesResult3.ImportedFileDescriptor.ImportedFileHistory);
}
private void MapMembers(ImportFile importFile, ListItem listItem)
{
    // Mirror the import file's core properties onto the SharePoint list item.
    listItem["Created"] = importFile.Created;
    listItem["Modified"] = importFile.Modified;
    var editor = importFile.ModifiedBy;
    if (editor != null)
    {
        listItem["Editor"] = new FieldUserValue { LookupId = editor.Id };
    }
    var author = importFile.CreatedBy;
    if (author != null)
    {
        listItem["Author"] = new FieldUserValue { LookupId = author.Id };
    }
    listItem["_Source"] = importFile.OriginalFullName;
    // Copy custom metadata, but only for fields the target list actually has;
    // anything else is logged and skipped.
    foreach (var pair in importFile.MetaData)
    {
        if (!m_availableFields.ContainsKey(pair.Key))
        {
            log.Warn("Could not import " + pair.Key + ". Field not found");
            continue;
        }
        listItem[pair.Key] = pair.Value;
    }
}
// Imports a parent design surface, then two reference surfaces derived from it
// (descriptor rows use ImportedFileType 6, carrying the parent's UID plus an
// offset of +1.5 / -2.5), and verifies all three files are listed for the
// project with descriptors matching the individual import responses.
public async Task TestImport2ReferenceSurfaceFiles(string uriRoot1, string uriRoot2)
{
    const string testText = "File Import ref test 3";
    Msg.Title(testText, "Create standard project and customer then upload two Reference surface files");
    var ts = new TestSupport();
    var customerUid = Guid.NewGuid();
    var startDateTime = ts.FirstEventDate;
    var createProjectResponse = ExecutorTestFixture.CreateCustomerProject(customerUid.ToString(), testText, Boundaries.Boundary1);
    ts.ProjectUid = new Guid(createProjectResponse.Result.Id);
    var importFileParent = new ImportFile(uriRoot1);
    var importFileChild = new ImportFile(uriRoot2);
    //Parent Design
    var importFilename = TestFileResolver.File(TestFile.TestDesignSurface1);
    var parentName = TestFileResolver.GetFullPath(importFilename);
    var importFileArray = new[] {
        "| EventType | ProjectUid | CustomerUid | Name | ImportedFileType | FileCreatedUtc | FileUpdatedUtc | ImportedBy | IsActivated | MinZoomLevel | MaxZoomLevel |",
        $"| ImportedFileDescriptor | {ts.ProjectUid} | {customerUid} | {parentName} | 1 | {startDateTime} | {startDateTime.AddDays(5)} | [email protected] | true | 15 | 19 |" };
    var filesResult1 = await importFileParent.SendRequestToFileImportV6(ts, importFileArray, 1, new ImportOptions(HttpMethod.Post, new[] { $"filename={importFilename}" }));
    var expectedResult1 = importFileParent.ExpectedImportFileDescriptorSingleResult.ImportedFileDescriptor;
    ts.CompareTheActualImportFileWithExpected(filesResult1.ImportedFileDescriptor, importFileParent.ExpectedImportFileDescriptorSingleResult.ImportedFileDescriptor, true);
    //Reference Surfaces
    var parentUid = filesResult1.ImportedFileDescriptor.ImportedFileUid;
    var offset1 = 1.5;
    var offset2 = -2.5;
    // Reference surface names are the parent file name (sans extension) plus the offset.
    parentName = Path.GetFileNameWithoutExtension(parentName);
    var name1 = $"{parentName} +{offset1}m";
    var name2 = $"{parentName} {offset2}m";
    var importFileArray2 = new[] {
        "| EventType | ProjectUid | CustomerUid | Name | ImportedFileType | FileCreatedUtc | FileUpdatedUtc | ImportedBy | IsActivated | MinZoomLevel | MaxZoomLevel | ParentUid | Offset |",
        $"| ImportedFileDescriptor | {ts.ProjectUid} | {customerUid} | {name1} | 6 | {startDateTime} | {startDateTime.AddDays(5)} | [email protected] | true | 15 | 19 | {parentUid} | {offset1} |",
        $"| ImportedFileDescriptor | {ts.ProjectUid} | {customerUid} | {name2} | 6 | {startDateTime} | {startDateTime.AddDays(5)} | [email protected] | true | 15 | 19 | {parentUid} | {offset2} |" };
    // The derived names contain '+' and spaces, so they are URL-encoded for the
    // multipart filename parameter.
    var filesResult2 = await importFileChild.SendRequestToFileImportV6(ts, importFileArray2, 1, new ImportOptions(HttpMethod.Post, new[] { $"filename={HttpUtility.UrlEncode(name1)}" }));
    var expectedResult2 = importFileChild.ExpectedImportFileDescriptorSingleResult.ImportedFileDescriptor;
    ts.CompareTheActualImportFileWithExpected(filesResult2.ImportedFileDescriptor, expectedResult2, true);
    var filesResult3 = await importFileChild.SendRequestToFileImportV6(ts, importFileArray2, 2, new ImportOptions(HttpMethod.Post, new[] { $"filename={HttpUtility.UrlEncode(name2)}" }));
    var expectedResult3 = importFileChild.ExpectedImportFileDescriptorSingleResult.ImportedFileDescriptor;
    ts.CompareTheActualImportFileWithExpected(filesResult3.ImportedFileDescriptor, expectedResult3, true);
    // All three files (parent + two references) must be listed, in import order.
    var importFileList = await importFileParent.GetImportedFilesFromWebApi <ImportedFileDescriptorListResult>($"api/v6/importedfiles?projectUid={ts.ProjectUid}", customerUid);
    Assert.True(importFileList.ImportedFileDescriptors.Count == 3, "Expected 3 imported files but got " + importFileList.ImportedFileDescriptors.Count);
    ts.CompareTheActualImportFileWithExpectedV6(importFileList.ImportedFileDescriptors[0], expectedResult1, true);
    ts.CompareTheActualImportFileWithExpectedV6(importFileList.ImportedFileDescriptors[1], expectedResult2, true);
    ts.CompareTheActualImportFileWithExpectedV6(importFileList.ImportedFileDescriptors[2], expectedResult3, true);
}
protected void Upload_Click(object sender, EventArgs e)
{
    // Imports an uploaded time-card file: saves it to a per-session temp folder,
    // runs it through ImportTimeCardRecordProcess using the parse settings from
    // the format-parameter control, and reports success or collected errors.
    PageErrors errors = PageErrors.getErrors(null, Page.Master);
    errors.clear();
    if (!ImportFile.HasFile)
    {
        errors.addError(HROne.Translation.PageErrorMessage.ERROR_INVALID_FILE);
        return;
    }

    string strTmpFolder = HROne.Common.Folder.GetOrCreateSessionTempFolder(Session.SessionID).FullName;
    // FileName is client-supplied: keep only the file-name component so a
    // crafted name cannot escape the session temp folder.
    string safeFileName = System.IO.Path.GetFileName(ImportFile.FileName);
    string strTmpFile = System.IO.Path.Combine(strTmpFolder, AppUtils.ServerDateTime().ToString("~yyyyMMddHHmmss_") + safeFileName);
    ImportFile.SaveAs(strTmpFile);

    ImportTimeCardRecordProcess timeCardRecordImport = new ImportTimeCardRecordProcess(dbConn, Session.SessionID);
    // Copy the user-selected parse settings onto the import process.
    timeCardRecordImport.DateSequence = this.Attendance_ImportFormatParameterControl1.DateSequence;
    timeCardRecordImport.DateSeparator = this.Attendance_ImportFormatParameterControl1.DateSeparator;
    timeCardRecordImport.YearFormat = this.Attendance_ImportFormatParameterControl1.YearFormat;
    timeCardRecordImport.TimeSeparator = this.Attendance_ImportFormatParameterControl1.TimeSeparator;
    timeCardRecordImport.DateColumnIndex = this.Attendance_ImportFormatParameterControl1.DateColumnIndex;
    timeCardRecordImport.TimeColumnIndex = this.Attendance_ImportFormatParameterControl1.TimeColumnIndex;
    timeCardRecordImport.DateColumnIndex2 = this.Attendance_ImportFormatParameterControl1.DateColumnIndex2;
    timeCardRecordImport.TimeColumnIndex2 = this.Attendance_ImportFormatParameterControl1.TimeColumnIndex2;
    timeCardRecordImport.LocationColumnIndex = this.Attendance_ImportFormatParameterControl1.LocationColumnIndex;
    timeCardRecordImport.TimeCardNumColumnIndex = this.Attendance_ImportFormatParameterControl1.TimeCardNumColumnIndex;
    timeCardRecordImport.ColumnDelimiter = this.Attendance_ImportFormatParameterControl1.ColumnDelimiter;
    timeCardRecordImport.UploadFileHasHeader = this.Attendance_ImportFormatParameterControl1.UploadFileHasHeader;

    try
    {
        timeCardRecordImport.UploadToTempDatabase(strTmpFile, WebUtils.GetCurUser(Session).UserID, string.Empty);
        timeCardRecordImport.ImportToDatabase();
        errors.addError(HROne.Translation.PageMessage.IMPORT_SUCCESSFUL);
        Attendance_ImportFormatParameterControl1.SaveSettings();
    }
    catch (HRImportException ex)
    {
        // Prefer the detailed per-line errors collected by the import; fall back
        // to the exception message when none were recorded.
        if (timeCardRecordImport.errors.List.Count > 0)
        {
            foreach (string errorString in timeCardRecordImport.errors.List)
            {
                errors.addError(errorString);
            }
        }
        else
        {
            errors.addError(ex.Message);
        }
    }
    finally
    {
        // Always remove the temp copy. The original only deleted it on success or
        // HRImportException, leaking the file when any other exception escaped.
        System.IO.File.Delete(strTmpFile);
    }
}
public async Task<IActionResult> TestImport()
{
    // Resolve the current user, then push the hard-coded sample workbook through
    // the Excel importer and return any importer error message as JSON.
    var user = await _userManager.FindByNameAsync(User.Identity.Name);
    var error = ImportFile.ImportExcel(@"c:\temp\a1.xlsx", _db, _logger, user.Id);
    return Json(new { error });
}
private void DeleteFile(ImportFile file, ClientContext context)
{
    // Delete the file at its server-relative location and commit immediately.
    var serverRelativeFileUrl = m_serverRelativeListUrl + file.ServerRelativePath;
    context.Web.GetFileByServerRelativeUrl(serverRelativeFileUrl).DeleteObject();
    context.ExecuteQuery();
}
public void TestAddFilesToFolder()
{
    // Adding a file to a folder must set that folder as the file's Parent.
    var parentFolder = new ImportFolder();
    var child = new ImportFile();

    parentFolder.Add(child);

    Assert.AreEqual(parentFolder, child.Parent);
}
public override global::System.Data.DataSet Clone()
{
    // Deep-copy the typed DataSet, then restore the typed-table wiring and
    // serialization mode on the copy.
    var clone = (ImportFile)base.Clone();
    clone.InitVars();
    clone.SchemaSerializationMode = this.SchemaSerializationMode;
    return clone;
}
EditWindow(ImportFile file, Window parentWindow)
{
    // Bind the window to its parent and the file being edited; any edit to the
    // path box marks the window dirty.
    Owner = parentWindow;
    InitializeComponent();
    thisFile = file;
    PathBox.Text = thisFile.FilePath;
    PathBox.TextChanged += delegate { changed = true; };
}
// Parses a Sample2-format file line by line, routing each record to the
// Header/Detail/Trailer collections of the layout by its record-type code.
// Detail and trailer rows remember the line number of the most recent header
// (ParentLineNumber) so they can be associated with it later.
public void Import(string fileName)
{
    int lineHeader = 0;  // line number of the most recently seen header record
    Sample2 sample2Layout = new Sample2();
    ImportFile = new ImportFile(sample2Layout);
    if (ImportFile.PrepareFile(fileName))
    {
        Lines.Clear();
        while (ImportFile.ReadLine() && !ImportFile.ReadFailure)
        {
            // Keep a raw copy of every line read.
            Lines.Append(ImportFile.Line + Environment.NewLine);
            //The Sample2 line Identifier is encountered in column 1
            //Delimited layout should always mark the column identifier
            ImportFile.SetIdentificadorCorrente(ImportFile.CurrentLine["Col_1"].ToString());
            switch ((Sample2.RecordType) int.Parse(ImportFile.CurrentIdentifier))
            {
                case Sample2.RecordType.Header:
                    lineHeader = ImportFile.CurrentLineNumber;
                    sample2Layout.HeaderRows.Add(new Sample2.HeaderRow(ImportFile.CurrentLine));
                    sample2Layout.HeaderRows.Last().LineNumber = ImportFile.CurrentLineNumber;
                    break;
                case Sample2.RecordType.Detail:
                    sample2Layout.DetailRows.Add(new Sample2.DetailRow(ImportFile.CurrentLine));
                    sample2Layout.DetailRows.Last().LineNumber = ImportFile.CurrentLineNumber;
                    sample2Layout.DetailRows.Last().ParentLineNumber = lineHeader;
                    break;
                case Sample2.RecordType.Trailer:
                    sample2Layout.TrailerRows.Add(new Sample2.TrailerRow(ImportFile.CurrentLine));
                    sample2Layout.TrailerRows.Last().LineNumber = ImportFile.CurrentLineNumber;
                    sample2Layout.TrailerRows.Last().ParentLineNumber = lineHeader;
                    break;
            }
            // Optionally stop after the first imported line.
            if (this.ImportOnlyFirstLine)
            {
                break;
            }
        }
        // Surface any reader-level error along with the line it occurred on.
        if (ImportFile.Error)
        {
            AddLineError("INTERNAL", ImportFile.ErrorDescription, ImportFile.CurrentLineNumber);
        }
    }
    else
    {
        // PrepareFile failed: report why the file could not be opened/parsed.
        AddLineError("INTERNAL", ImportFile.ErrorDescription, ImportFile.CurrentLineNumber);
    }
}
// Reads the CSV headers and up to five sample rows from the uploaded import
// file, returning JSON with: the available DB fields, each CSV column (paired
// with any previously saved csv->db mapping), and sample values per column.
private JsonResult ReadFields(ImportFile importFile)
{
    var textReader = new StringReader(importFile.Contents.AsString(importFile.CodePage));
    var csv = new CsvReader(textReader, true) { SkipEmptyLines = true };
    var csvHeaders = csv.GetFieldHeaders();
    //mapping: csv->db,csv->db
    var currentMappings = importFile.ColumnsToRead.DefaultTo("").SplitWithString(",").Select(s => s.SplitWithString("->"));
    var dbFields = DbFieldsList;
    const int numSampleLinesWanted = 5;
    var numSampleLinesFound = numSampleLinesWanted;
    // Column title -> sampled values (one entry per row actually read).
    var sampleValues = new Dictionary <string, List <string> >();
    for (var i = 0; i < numSampleLinesFound; i++)
    {
        // Stop early when the file has fewer rows than wanted.
        if (csv.EndOfStream)
        {
            numSampleLinesFound = i;
            break;
        }
        foreach (var csvHeader in csvHeaders)
        {
            if (i == 0)
            {
                // The first row also initializes the per-column sample lists.
                if (sampleValues.ContainsKey(csvHeader))
                {
                    // ignore second column with same title
                    continue;
                }
                sampleValues.Add(csvHeader, new List <string> { csv[i, csvHeader] });
            }
            else
            {
                sampleValues[csvHeader].Add(csv[i, csvHeader]);
            }
        }
    }
    return(new {
        possible = dbFields,
        csvFields = csvHeaders.Select(header => new {
            field = header,
            // The saved db mapping for this column, or "" when unmapped.
            map = currentMappings.Where(cs => cs[0] == header)
                  .Select(cs => cs[1]).SingleOrDefault().DefaultTo(""),
            sample = sampleValues[header]
        })
    }.AsJsonResult());
}
public void TestNonDuplicateName()
{
    // A name with no existing duplicate must be returned unchanged.
    var resolver = new FlatListDuplicateNameResolver();
    var document = new ImportFile { Name = "Test.docx" };

    Assert.AreEqual(document.Name, resolver.ResolveName(document));
}
// Parses a Sample1-format file line by line, routing each record to the
// Header/Detail/Trailer collections of the layout by its record-type code.
// Detail and trailer rows remember the line number of the most recent header
// (ParentLineNumber) so they can be associated with it later.
public void Import(string fileName)
{
    int lineHeader = 0;  // line number of the most recently seen header record
    Sample1 sample1Layout = new Sample1();
    ImportFile = new ImportFile(sample1Layout);
    if (ImportFile.PrepareFile(fileName))
    {
        Lines.Clear();
        while (ImportFile.ReadLine() && !ImportFile.ReadFailure)
        {
            // Keep a raw copy of every line read.
            Lines.Append(ImportFile.Line + Environment.NewLine);
            switch ((Sample1.RecordType) int.Parse(ImportFile.CurrentIdentifier))
            {
                case Sample1.RecordType.Header:
                    lineHeader = ImportFile.CurrentLineNumber;
                    sample1Layout.HeaderRows.Add(new Sample1.HeaderRow(ImportFile.CurrentLine));
                    sample1Layout.HeaderRows.Last().LineNumber = ImportFile.CurrentLineNumber;
                    break;
                case Sample1.RecordType.Detail:
                    sample1Layout.DetailRows.Add(new Sample1.DetailRow(ImportFile.CurrentLine));
                    sample1Layout.DetailRows.Last().LineNumber = ImportFile.CurrentLineNumber;
                    sample1Layout.DetailRows.Last().ParentLineNumber = lineHeader;
                    break;
                case Sample1.RecordType.Trailer:
                    sample1Layout.TrailerRows.Add(new Sample1.TrailerRow(ImportFile.CurrentLine));
                    sample1Layout.TrailerRows.Last().LineNumber = ImportFile.CurrentLineNumber;
                    sample1Layout.TrailerRows.Last().ParentLineNumber = lineHeader;
                    break;
            }
            // Optionally stop after the first imported line.
            if (this.ImportOnlyFirstLine)
            {
                break;
            }
        }
        // Surface any reader-level error along with the line it occurred on.
        if (ImportFile.Error)
        {
            AddLineError("INTERNAL", ImportFile.ErrorDescription, ImportFile.CurrentLineNumber);
        }
    }
    else
    {
        // PrepareFile failed: report why the file could not be opened/parsed.
        AddLineError("INTERNAL", ImportFile.ErrorDescription, ImportFile.CurrentLineNumber);
    }
}
private Task<ImportedFileDescriptorSingleResult> ImportFiles(TestSupport testSupport, Guid projectUid, Guid customerUid, DateTime startDateTime, string testFile)
{
    // Build a two-row event table (header + one ImportedFileDescriptor row) and
    // POST it through the v6 file-import endpoint.
    var header = "| EventType | ProjectUid | CustomerUid | Name | ImportedFileType | FileCreatedUtc | FileUpdatedUtc | ImportedBy | IsActivated |";
    var row = $"| ImportedFileDescriptor | {projectUid} | {customerUid} | {testFile} | 3 | {startDateTime} | {startDateTime.AddDays(5)} | [email protected] | true |";
    var importFile = new ImportFile();
    return importFile.SendRequestToFileImportV6(testSupport, new[] { header, row }, 1, new ImportOptions(HttpMethod.Post));
}
// Recursively builds the import tree for a directory: validates the folder
// itself, and only when valid descends into subdirectories and wraps each file
// in an ImportFile. Hidden entries are skipped unless ImportHiddenFiles is set.
// Invalid files are logged and excluded; an invalid folder is logged and
// returned without children.
private ImportItem Load(string name)
{
    var directory = new DirectoryInfo(name);
    var item = new ImportFolder
    {
        SourceDirectory = directory,
        Name = m_filenameConverter.Convert(directory.Name),
        CreatedBy = MetaDataProvider.GetAuthor(directory.FullName),
        ModifiedBy = MetaDataProvider.GetEditor(directory.FullName),
        MetaData = MetaDataProvider.GetMetaData(directory.FullName)
    };
    var result = m_validator.Validate(item);
    if (result.IsValid)
    {
        // Recurse into visible (or all, when configured) subdirectories.
        foreach (var folder in directory.GetDirectories())
        {
            if ((folder.Attributes & FileAttributes.Hidden) != FileAttributes.Hidden || m_settings.ImportHiddenFiles)
            {
                item.Add(Load(folder.FullName));
            }
        }
        foreach (var file in directory.GetFiles())
        {
            if ((file.Attributes & FileAttributes.Hidden) != FileAttributes.Hidden || m_settings.ImportHiddenFiles)
            {
                var importFile = new ImportFile
                {
                    SourceFile = file,
                    Name = m_filenameConverter.Convert(file.Name),
                    CreatedBy = MetaDataProvider.GetAuthor(file.FullName),
                    ModifiedBy = MetaDataProvider.GetEditor(file.FullName),
                    MetaData = MetaDataProvider.GetMetaData(file.FullName),
                };
                var fileResult = m_validator.Validate(importFile);
                if (fileResult.IsValid)
                {
                    item.Add(importFile);
                }
                else
                {
                    // Invalid file: log and skip, but keep importing siblings.
                    Log(fileResult);
                }
            }
        }
    }
    else
    {
        // Invalid folder: log and return it without loading any children.
        Log(result);
    }
    return(item);
}
// Question 5: Please provide a break down by project group of success and unsuccessful deployments (success being releases that are deployed to live),
// the number of deployments involved in the release pipeline and whether some environments had to be repeatedly deployed.
//
// Groups the import file's projects by their Group property.
// Throws a wrapping Exception (with the original as InnerException) when the
// grouping cannot be performed.
public IEnumerable <IGrouping <string, Project> > ProjectsByGroup(ImportFile importFile)
{
    try
    {
        // Materialize the grouping here: GroupBy is lazily evaluated, so without
        // ToList() an exception thrown while grouping would escape this try/catch
        // and surface at the (possibly distant) enumeration site instead.
        return importFile.Projects.GroupBy(x => x.Group).ToList();
    }
    catch (Exception e)
    {
        throw new Exception("Failed to group projects by project group", e);
    }
}
public async Task <IHttpActionResult> ImportExcelAsync(ImportFile importFile)
{
    // Runs the Excel import for the file described by the request and returns
    // the importer's result, or a 500 carrying the exception on failure.
    try
    {
        var sourcePath = Path.Combine($@"{importFile.FilePath}", $@"{importFile.FileName}");
        var returnValue = _importManager.ImportToTable(sourcePath);
        return Ok(returnValue);
    }
    catch (Exception ex)
    {
        return InternalServerError(ex);
    }
}
// Generated typed-DataSet helper: returns the XML Schema complex type that
// describes this DataSet, registering the DataSet's schema into 'xs' unless an
// identical schema (compared byte-for-byte after serialization) for the same
// target namespace is already present.
public static global::System.Xml.Schema.XmlSchemaComplexType GetTypedDataSetSchema(global::System.Xml.Schema.XmlSchemaSet xs)
{
    ImportFile ds = new ImportFile();
    global::System.Xml.Schema.XmlSchemaComplexType type = new global::System.Xml.Schema.XmlSchemaComplexType();
    global::System.Xml.Schema.XmlSchemaSequence sequence = new global::System.Xml.Schema.XmlSchemaSequence();
    // The returned type is a sequence containing a wildcard in the DataSet's namespace.
    global::System.Xml.Schema.XmlSchemaAny any = new global::System.Xml.Schema.XmlSchemaAny();
    any.Namespace = ds.Namespace;
    sequence.Items.Add(any);
    type.Particle = sequence;
    global::System.Xml.Schema.XmlSchema dsSchema = ds.GetSchemaSerializable();
    if (xs.Contains(dsSchema.TargetNamespace))
    {
        // A schema for this target namespace already exists: only add ours if it
        // differs. Equality is checked by writing both schemas to memory streams
        // and comparing lengths and bytes.
        global::System.IO.MemoryStream s1 = new global::System.IO.MemoryStream();
        global::System.IO.MemoryStream s2 = new global::System.IO.MemoryStream();
        try
        {
            global::System.Xml.Schema.XmlSchema schema = null;
            dsSchema.Write(s1);
            for (global::System.Collections.IEnumerator schemas = xs.Schemas(dsSchema.TargetNamespace).GetEnumerator(); schemas.MoveNext();)
            {
                schema = ((global::System.Xml.Schema.XmlSchema)(schemas.Current));
                s2.SetLength(0);
                schema.Write(s2);
                if ((s1.Length == s2.Length))
                {
                    s1.Position = 0;
                    s2.Position = 0;
                    // Advance both streams while their bytes match.
                    for (; ((s1.Position != s1.Length) && (s1.ReadByte() == s2.ReadByte()));)
                    {
                        ;
                    }
                    // Full match: an identical schema is already registered.
                    if ((s1.Position == s1.Length))
                    {
                        return(type);
                    }
                }
            }
        }
        finally
        {
            if ((s1 != null))
            {
                s1.Close();
            }
            if ((s2 != null))
            {
                s2.Close();
            }
        }
    }
    xs.Add(dsSchema);
    return(type);
}
private void CreateImportFileEntity(CrmOrganizationServiceContext serviceContext, string content, Guid importId, Guid dataMapId)
{
    // Creates the CRM importfile record carrying the raw file content and links
    // it to its parent import and import map. Option-set codes are kept exactly
    // as before (FileTypeCode 0, DataDelimiterCode 1, FieldDelimiterCode 2,
    // ProcessCode 1).
    var entity = new ImportFile
    {
        Name = Path.GetFileName(this.FilePath),
        Source = this.FilePath,
        Content = content,
        SourceEntityName = this.SourceEntityName,
        TargetEntityName = this.TargetEntityName,
        FileTypeCode = new OptionSetValue(0),
        DataDelimiterCode = new OptionSetValue(1),
        FieldDelimiterCode = new OptionSetValue(2),
        IsFirstRowHeader = true,
        EnableDuplicateDetection = false,
        ProcessCode = new OptionSetValue(1),
        ImportId = new EntityReference("import", importId),
        ImportMapId = new EntityReference("importmap", dataMapId)
    };
    serviceContext.Create(entity);
}
/// <summary>
/// Imports records to Microsoft Dynamics CRM from the specified .csv file.
/// End-to-end flow: create an import map and its column/lookup/picklist
/// mappings, create the import job and import file, then run the
/// Parse -> Transform -> ImportRecords pipeline, waiting on each async job
/// and reporting any row-level errors.
/// </summary>
public void ImportRecords()
{
    // Create an import map. The map is the container that the column,
    // lookup, and picklist mappings below attach to.
    ImportMap importMap = new ImportMap()
    {
        Name = "Import Map " + DateTime.Now.Ticks.ToString(),
        Source = "Import Accounts.csv",
        Description = "Description of data being imported",
        EntitiesPerFile = new OptionSetValue((int)ImportMapEntitiesPerFile.SingleEntityPerFile),
        EntityState = EntityState.Created
    };
    Guid importMapId = _serviceProxy.Create(importMap);

    // Create column mappings.

    #region Column One Mappings
    // Create a column mapping for a 'text' type field:
    // CSV column "src_name" -> account.name.
    ColumnMapping colMapping1 = new ColumnMapping()
    {
        // Set source properties.
        SourceAttributeName = "src_name",
        SourceEntityName = "Account_1",

        // Set target properties.
        TargetAttributeName = "name",
        TargetEntityName = Account.EntityLogicalName,

        // Relate this column mapping with the data map.
        ImportMapId = new EntityReference(ImportMap.EntityLogicalName, importMapId),

        // Force this column to be processed.
        ProcessCode = new OptionSetValue((int)ColumnMappingProcessCode.Process)
    };

    // Create the mapping.
    Guid colMappingId1 = _serviceProxy.Create(colMapping1);
    #endregion

    #region Column Two Mappings
    // Create a column mapping for a 'lookup' type field:
    // CSV column "src_parent" -> account.parentaccountid.
    ColumnMapping colMapping2 = new ColumnMapping()
    {
        // Set source properties.
        SourceAttributeName = "src_parent",
        SourceEntityName = "Account_1",

        // Set target properties.
        TargetAttributeName = "parentaccountid",
        TargetEntityName = Account.EntityLogicalName,

        // Relate this column mapping with the data map.
        ImportMapId = new EntityReference(ImportMap.EntityLogicalName, importMapId),

        // Force this column to be processed.
        ProcessCode = new OptionSetValue((int)ColumnMappingProcessCode.Process),
    };

    // Create the mapping.
    Guid colMappingId2 = _serviceProxy.Create(colMapping2);

    // Because we created a column mapping of type lookup, we need to specify lookup details in a lookupmapping.
    // One lookupmapping will be for the parent account, and the other for the current record.
    // This lookupmapping is important because without it the current record
    // cannot be used as the parent of another record.

    // Create a lookup mapping to the parent account: resolve the lookup
    // against existing system records by the account's "name" attribute.
    LookUpMapping parentLookupMapping = new LookUpMapping()
    {
        // Relate this mapping with its parent column mapping.
        ColumnMappingId = new EntityReference(ColumnMapping.EntityLogicalName, colMappingId2),

        // Force this column to be processed.
        ProcessCode = new OptionSetValue((int)LookUpMappingProcessCode.Process),

        // Set the lookup for an account entity by its name attribute.
        LookUpEntityName = Account.EntityLogicalName,
        LookUpAttributeName = "name",
        LookUpSourceCode = new OptionSetValue((int)LookUpMappingLookUpSourceCode.System)
    };

    // Create the lookup mapping.
    Guid parentLookupMappingId = _serviceProxy.Create(parentLookupMapping);

    // Create a lookup on the current record's "src_name" so that this record can
    // be used as the parent account for another record being imported.
    // Without this lookup, no record using this account as its parent will be imported.
    // Note LookUpSourceCode.Source: the lookup resolves within the source
    // file itself rather than against existing system records.
    LookUpMapping currentLookUpMapping = new LookUpMapping()
    {
        // Relate this lookup with its parent column mapping.
        ColumnMappingId = new EntityReference(ColumnMapping.EntityLogicalName, colMappingId2),

        // Force this column to be processed.
        ProcessCode = new OptionSetValue((int)LookUpMappingProcessCode.Process),

        // Set the lookup for the current record by its src_name attribute.
        LookUpAttributeName = "src_name",
        LookUpEntityName = "Account_1",
        LookUpSourceCode = new OptionSetValue((int)LookUpMappingLookUpSourceCode.Source)
    };

    // Create the lookup mapping
    Guid currentLookupMappingId = _serviceProxy.Create(currentLookUpMapping);
    #endregion

    #region Column Three Mappings
    // Create a column mapping for a 'picklist' type field:
    // CSV column "src_addresstype" -> account.address1_addresstypecode.
    ColumnMapping colMapping3 = new ColumnMapping()
    {
        // Set source properties
        SourceAttributeName = "src_addresstype",
        SourceEntityName = "Account_1",

        // Set target properties
        TargetAttributeName = "address1_addresstypecode",
        TargetEntityName = Account.EntityLogicalName,

        // Relate this column mapping with its parent data map
        ImportMapId = new EntityReference(ImportMap.EntityLogicalName, importMapId),

        // Force this column to be processed
        ProcessCode = new OptionSetValue((int)ColumnMappingProcessCode.Process)
    };

    // Create the mapping
    Guid colMappingId3 = _serviceProxy.Create(colMapping3);

    // Because we created a column mapping of type picklist, we need to specify picklist details in a picklistMapping.
    // Source text "bill" maps to option value 1.
    PickListMapping pickListMapping1 = new PickListMapping()
    {
        SourceValue = "bill",
        TargetValue = 1,

        // Relate this column mapping with its column mapping data map
        ColumnMappingId = new EntityReference(ColumnMapping.EntityLogicalName, colMappingId3),

        // Force this column to be processed
        ProcessCode = new OptionSetValue((int)PickListMappingProcessCode.Process)
    };

    // Create the mapping
    Guid picklistMappingId1 = _serviceProxy.Create(pickListMapping1);

    // Need a picklist mapping for every address type code expected.
    // Source text "ship" maps to option value 2.
    PickListMapping pickListMapping2 = new PickListMapping()
    {
        SourceValue = "ship",
        TargetValue = 2,

        // Relate this column mapping with its column mapping data map
        ColumnMappingId = new EntityReference(ColumnMapping.EntityLogicalName, colMappingId3),

        // Force this column to be processed
        ProcessCode = new OptionSetValue((int)PickListMappingProcessCode.Process)
    };

    // Create the mapping
    Guid picklistMappingId2 = _serviceProxy.Create(pickListMapping2);
    #endregion

    // Create Import job that the import file below attaches to.
    Import import = new Import()
    {
        // IsImport is obsolete; use ModeCode to declare Create or Update.
        ModeCode = new OptionSetValue((int)ImportModeCode.Create),
        Name = "Importing data"
    };
    Guid importId = _serviceProxy.Create(import);

    // Create Import File: carries the CSV content plus parsing settings
    // (comma field delimiter, double-quote data delimiter, header row).
    ImportFile importFile = new ImportFile()
    {
        Content = BulkImportHelper.ReadCsvFile("Import Accounts.csv"), // Read contents from disk.
        Name = "Account record import",
        IsFirstRowHeader = true,
        ImportMapId = new EntityReference(ImportMap.EntityLogicalName, importMapId),
        UseSystemMap = false,
        Source = "Import Accounts.csv",
        SourceEntityName = "Account_1",
        TargetEntityName = Account.EntityLogicalName,
        ImportId = new EntityReference(Import.EntityLogicalName, importId),
        EnableDuplicateDetection = false,
        FieldDelimiterCode = new OptionSetValue((int)ImportFileFieldDelimiterCode.Comma),
        DataDelimiterCode = new OptionSetValue((int)ImportFileDataDelimiterCode.DoubleQuote),
        ProcessCode = new OptionSetValue((int)ImportFileProcessCode.Process)
    };

    // Get the current user to set as record owner.
    WhoAmIRequest systemUserRequest = new WhoAmIRequest();
    WhoAmIResponse systemUserResponse = (WhoAmIResponse)_serviceProxy.Execute(systemUserRequest);

    // Set the owner ID.
    importFile.RecordsOwnerId = new EntityReference(SystemUser.EntityLogicalName, systemUserResponse.UserId);

    Guid importFileId = _serviceProxy.Create(importFile);

    //<snippetImportWithCreate1>
    // Retrieve the header columns used in the import file.
    GetHeaderColumnsImportFileRequest headerColumnsRequest = new GetHeaderColumnsImportFileRequest()
    {
        ImportFileId = importFileId
    };
    GetHeaderColumnsImportFileResponse headerColumnsResponse = (GetHeaderColumnsImportFileResponse)_serviceProxy.Execute(headerColumnsRequest);

    // Output the header columns.
    int columnNum = 1;
    foreach (string headerName in headerColumnsResponse.Columns)
    {
        Console.WriteLine("Column[" + columnNum.ToString() + "] = " + headerName);
        columnNum++;
    }
    //</snippetImportWithCreate1>

    //<snippetImportWithCreate2>
    // Parse the import file. Parsing is asynchronous; block until the
    // async job finishes, then report any parse errors.
    ParseImportRequest parseImportRequest = new ParseImportRequest()
    {
        ImportId = importId
    };
    ParseImportResponse parseImportResponse = (ParseImportResponse)_serviceProxy.Execute(parseImportRequest);
    Console.WriteLine("Waiting for Parse async job to complete");
    //</snippetImportWithCreate2>
    BulkImportHelper.WaitForAsyncJobCompletion(_serviceProxy, parseImportResponse.AsyncOperationId);
    BulkImportHelper.ReportErrors(_serviceProxy, importFileId);

    //<snippetImportWithCreate3>
    // Retrieve the first two distinct values for column 1 from the parse table.
    // NOTE: You must create the parse table first using the ParseImport message.
    // The parse table is not accessible after ImportRecordsImportResponse is called.
    GetDistinctValuesImportFileRequest distinctValuesRequest = new GetDistinctValuesImportFileRequest()
    {
        columnNumber = 1,
        ImportFileId = importFileId,
        pageNumber = 1,
        recordsPerPage = 2,
    };
    GetDistinctValuesImportFileResponse distinctValuesResponse = (GetDistinctValuesImportFileResponse)_serviceProxy.Execute(distinctValuesRequest);

    // Output the distinct values. In this case: (column 1, row 1) and (column 1, row 2).
    int cellNum = 1;
    foreach (string cellValue in distinctValuesResponse.Values)
    {
        Console.WriteLine("(1, " + cellNum.ToString() + "): " + cellValue);
        Console.WriteLine(cellValue);
        cellNum++;
    }
    //</snippetImportWithCreate3>

    //<snippetImportWithCreate4>
    // Retrieve data from the parse table.
    // NOTE: You must create the parse table first using the ParseImport message.
    // The parse table is not accessible after ImportRecordsImportResponse is called.
    RetrieveParsedDataImportFileRequest parsedDataRequest = new RetrieveParsedDataImportFileRequest()
    {
        ImportFileId = importFileId,
        PagingInfo = new PagingInfo()
        {
            // Specify the number of entity instances returned per page.
            Count = 2,

            // Specify the number of pages returned from the query.
            PageNumber = 1,

            // Specify a total number of entity instances returned.
            PagingCookie = "1"
        }
    };
    RetrieveParsedDataImportFileResponse parsedDataResponse = (RetrieveParsedDataImportFileResponse)_serviceProxy.Execute(parsedDataRequest);

    // Output the first two rows retrieved.
    int rowCount = 1;
    foreach (string[] rows in parsedDataResponse.Values)
    {
        int colCount = 1;
        foreach (string column in rows)
        {
            Console.WriteLine("(" + rowCount.ToString() + "," + colCount.ToString() + ") = " + column);
            colCount++;
        }
        rowCount++;
    }
    //</snippetImportWithCreate4>

    //<snippetImportWithCreate5>
    // Transform the import: maps parsed source rows to target entity
    // attributes using the import map. Asynchronous; wait and report errors.
    TransformImportRequest transformImportRequest = new TransformImportRequest()
    {
        ImportId = importId
    };
    TransformImportResponse transformImportResponse = (TransformImportResponse)_serviceProxy.Execute(transformImportRequest);
    Console.WriteLine("Waiting for Transform async job to complete");
    //</snippetImportWithCreate5>
    BulkImportHelper.WaitForAsyncJobCompletion(_serviceProxy, transformImportResponse.AsyncOperationId);
    BulkImportHelper.ReportErrors(_serviceProxy, importFileId);

    //<snippetImportWithCreate6>
    // Upload the records: creates the actual target records from the
    // transformed data. Asynchronous; wait and report errors.
    ImportRecordsImportRequest importRequest = new ImportRecordsImportRequest()
    {
        ImportId = importId
    };
    ImportRecordsImportResponse importResponse = (ImportRecordsImportResponse)_serviceProxy.Execute(importRequest);
    Console.WriteLine("Waiting for ImportRecords async job to complete");
    //</snippetImportWithCreate6>
    BulkImportHelper.WaitForAsyncJobCompletion(_serviceProxy, importResponse.AsyncOperationId);
    BulkImportHelper.ReportErrors(_serviceProxy, importFileId);
}