/// <summary>
/// Runs the import job with the given id against the named CSV file, recording
/// stream length, timing and running-state in a new <see cref="ImportResult"/>
/// stored under the job id.
/// </summary>
/// <param name="jobId">Id of the job to look up in ImportJobs.</param>
/// <param name="csvFileName">Name of the CSV file passed to GetCsvContent.</param>
public void RunImportJob(string jobId, string csvFileName)
{
    // FirstOrDefault: the original used First(), which throws when the job is
    // missing and made the subsequent null check unreachable dead code.
    ImportJob job = ImportJobs.FirstOrDefault(x => x.ImportJobId == jobId);
    if (job == null)
    {
        return;
    }

    ImportResult result = new ImportResult();
    _importResults[jobId] = result;

    using (var data = GetCsvContent(csvFileName))
    {
        result.Length = data.BaseStream.Length;
        CsvReader csvReader = new CsvReader(data, job.ColumnDelimiter);
        try
        {
            result.IsRunning = true;
            result.Started = DateTime.Now;
            Import(job, csvReader, result);
        }
        finally
        {
            // Always stamp completion and clear the running flag, even when
            // Import throws.
            result.Stopped = DateTime.Now;
            result.IsRunning = false;
        }
    }
}
// Prepares the view model for opening in the editor: loads the full entity,
// clones it for editing, and re-attaches change tracking.
public void InitializeForOpen()
{
    // initialize if needed — InnerItem == _originalItem means the item has not
    // been cloned for editing yet (first open).
    if (InnerItem == _originalItem)
    {
        IsInitializing = true;

        // Detach first so the reload below does not fire stale handlers.
        DetachEventListeners();

        // Load complete item here
        _originalItem = LoadItem(_originalItem.ImportJobId);

        // Edit a deep clone so Cancel can discard changes without touching the
        // original. NOTE(review): _entityFactory is cast with "as" — presumably
        // it implements IKnownSerializationTypes; a null result is tolerated by
        // DeepClone — confirm.
        InnerItem = _originalItem.DeepClone(_entityFactory as IKnownSerializationTypes);

        // current workaround for repository crash
        //ItemRepository = Container.Resolve<IImportRepository>();
        _itemRepository.Attach(InnerItem);

        InitializePropertiesForViewing();

        // Subscribe last so property initialization above does not mark the
        // item as modified.
        InnerItem.PropertyChanged += CurrentItem_PropertyChanged;

        //OnUIThread(() =>
        //{
        //});
    }
}
/// <summary>
/// Imports catalog content from "Catalog.xml" under <paramref name="path"/>,
/// relaying background-task progress to the progress messenger, then syncs
/// metaclasses with the content type models.
/// </summary>
private void ImportCatalog(string path)
{
    var job = new ImportJob(path, "Catalog.xml", true);

    _progressMessenger.AddProgressMessageText("Importing Catalog content...", false, 20);

    // Forward each background-task message to the progress messenger.
    Action<IBackgroundTaskMessage> relayMessage = msg =>
    {
        bool hasError = msg.MessageType == BackgroundTaskMessageType.Error;
        int overallPercent = (int)Math.Round(msg.GetOverallProgress() * 100);
        string text = msg.Exception == null
            ? msg.Message
            : string.Format("{0} {1}", msg.Message, msg.ExceptionMessage);
        _progressMessenger.AddProgressMessageText(text, hasError, overallPercent);
    };

    job.Execute(relayMessage, CancellationToken.None);
    _progressMessenger.AddProgressMessageText("Done importing Catalog content", false, 60);

    //We are running in front-end site context, the metafield update events are ignored, we need to sync manually
    _progressMessenger.AddProgressMessageText("Syncing metaclasses with content types", false, 60);
    SyncMetaClassesToContentTypeModels();
    _progressMessenger.AddProgressMessageText("Done syncing metaclasses with content types", false, 70);
}
// Despite the name, this acts as an initializer: it stores the three
// collaborating job instances for later use. NOTE(review): "GetInstance" is a
// misleading name for a setter — kept for interface compatibility.
public void GetInstance(FtpJob ftp, FsCopyFromFtpJob fsCopy, ImportJob impJob)
{
    // The three assignments are independent of each other.
    importJob = impJob;
    fsCopyFromFtpJob = fsCopy;
    ftpJob = ftp;
}
/// <summary>
/// Imports catalog content from "Catalog.xml" under <paramref name="path"/>,
/// forwarding every progress message (except "Overwriting" warnings) both to
/// the web progress messenger and to <paramref name="statusChange"/>.
/// </summary>
public void ImportCatalog(string path, Action<string> statusChange)
{
    IProgressMessenger progressMessenger = new WebProgressMessenger();
    var job = new ImportJob(path, "Catalog.xml", true);

    progressMessenger.AddProgressMessageText("Importing Catalog content...", false, 20);

    Action<IBackgroundTaskMessage> relayMessage = msg =>
    {
        // Skip noisy "Overwriting" warnings entirely.
        if (msg.MessageType == BackgroundTaskMessageType.Warning && msg.Message.Contains("Overwriting"))
        {
            return;
        }

        bool hasError = msg.MessageType == BackgroundTaskMessageType.Error;
        int overallPercent = (int)Math.Round(msg.GetOverallProgress() * 100);
        string text = msg.Exception == null ? msg.Message : $"{msg.Message} {msg.ExceptionMessage}";

        statusChange($"Message: {text} | Has Error: {hasError} | Percent: {overallPercent}");
        progressMessenger.AddProgressMessageText(text, hasError, overallPercent);
    };

    job.Execute(relayMessage, CancellationToken.None);
    progressMessenger.AddProgressMessageText("Done importing Catalog content", false, 60);
}
/// <summary>
/// Returns a converter that parses a raw CSV field value into the CLR type
/// declared for <paramref name="fieldName"/> in the job schema. Fields not
/// present in the schema pass through unchanged as strings.
/// </summary>
/// <param name="job">Job whose Schema maps field names to type info.</param>
/// <param name="fieldName">Name of the field to look up.</param>
/// <returns>A parsing delegate for the schema type.</returns>
/// <exception cref="NotSupportedException">The schema declares an unknown type.</exception>
static Func<string, dynamic> GetTypeConverterForField(ImportJob job, string fieldName)
{
    ImportJob.SchemaItemInfo info;
    if (job.Schema.TryGetValue(fieldName, out info))
    {
        switch (info.Type.ToLower())
        {
            case "int": return t => int.Parse(t);
            case "long": return t => long.Parse(t);
            case "double": return t => double.Parse(t);
            case "decimal": return t => decimal.Parse(t);
            case "datetime": return t => DateTime.Parse(t);
            case "string": return t => t;
            // Single-character values are treated as 0/1 flags; anything longer
            // must be a parseable boolean literal ("true"/"false").
            case "bool": return t => t.Length > 1 ? bool.Parse(t) : t == "1";
            default:
                // Fixes typo "hanlder" -> "handler"; NotSupportedException is
                // more specific than Exception and still caught by any existing
                // catch (Exception) handlers.
                throw new NotSupportedException("Cannot find handler for type " + info.Type);
        }
    }
    // Unknown fields: identity converter.
    return t => t;
}
/// <summary>
/// Cmdlet body: retrieves the import job record and its formatted import log
/// from CRM, optionally writes the log to <c>outputFile</c>, and emits the job
/// object to the pipeline.
/// </summary>
protected override void ProcessRecord()
{
    base.ProcessRecord();
    base.WriteVerbose(string.Format("Downloading Solution Import Log for: {0}", ImportJobId));

    CrmConnection connection = CrmConnection.Parse(connectionString);
    using (OrganizationService service = new OrganizationService(connection))
    {
        var importJob = service
            .Retrieve(ImportJob.EntityLogicalName, importJobId, new ColumnSet(true))
            .ToEntity<ImportJob>();

        var importLogRequest = new RetrieveFormattedImportJobResultsRequest { ImportJobId = importJobId };
        var importLogResponse = (RetrieveFormattedImportJobResultsResponse)service.Execute(importLogRequest);

        // Persist the formatted log only when an output path was supplied.
        if (!string.IsNullOrEmpty(outputFile))
        {
            File.WriteAllText(outputFile, importLogResponse.FormattedResults);
        }

        WriteObject(importJob);
    }

    base.WriteVerbose(string.Format("Solution Import Log Downloaded Successfully"));
}
// Polls the importjob record every 5 seconds until it reports a completion
// time, logging progress whenever it advances by more than 5 points.
private static void MonitorJobProgress(IEnhancedOrgService service, Guid importJobId)
{
    var progress = 0;   // last logged progress (high-water mark)
    ImportJob job = null;
    do
    {
        Thread.Sleep(5000);
        try
        {
            job = service.Retrieve("importjob", importJobId,
                    new ColumnSet(ImportJob.Fields.Progress, ImportJob.Fields.CompletedOn))
                .ToEntity<ImportJob>();
            var currentProgress = (int?)job.Progress ?? 0;
            // Log only on a >5-point advance. Note the interpolation hole also
            // updates the high-water mark (progress = currentProgress) as a
            // side effect — intentional, but easy to miss.
            if (currentProgress - progress > 5)
            {
                log.Log($"... imported {progress = currentProgress}% ...");
            }
        }
        catch
        {
            // ignored — transient retrieve failures are tolerated and the loop
            // retries on the next tick. NOTE(review): a permanently failing
            // retrieve leaves job null and loops forever — confirm the caller
            // bounds the total wait.
        }
    }while (job?.CompletedOn == null);
}
// Background worker entry point: runs the import job on the worker thread and
// forwards its progress events to the worker's ReportProgress.
private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
{
    var job = (ImportJob)e.Argument;

    // Method-group conversion replaces the explicit delegate construction.
    job.OnProgressUpdated += this.backgroundWorker1.ReportProgress;

    e.Result = job.ExecuteJob();
    //EXTRA CREDIT: add support for cancellation
}
// Mapping-step view model for the import-job wizard. Pure pass-through
// constructor: all construction work happens in the base step; this type adds
// no state of its own here.
public ImportJobMappingStepViewModel(
    IRepositoryFactory<IImportRepository> repositoryFactory,
    IRepositoryFactory<ICatalogRepository> catalogRepositoryFactory,
    ImportJob item,
    WizardViewModelBare parentVM,
    IImportJobEntityFactory importFactory,
    IViewModelsFactory<IPickAssetViewModel> assetVmFactory,
    IViewModelsFactory<IColumnMappingViewModel> mappingVmFactory,
    IImportService importService,
    ImportEntityType[] entityImporters,
    IAuthenticationContext authContext)
    : base(repositoryFactory, catalogRepositoryFactory, importFactory, item, parentVM, assetVmFactory, mappingVmFactory, importService, entityImporters, authContext)
{
}
/// <summary>
/// HTTP handler: builds an <see cref="ImportJob"/> description from the posted
/// ImportArgs and drops it as a .job file into the new-jobs folder for the
/// importer process to pick up.
/// </summary>
/// <param name="req">Request whose body is JSON-deserialized to ImportArgs.</param>
/// <param name="resp">Response; closed with no body on success.</param>
/// <exception cref="ApiException">500 when the job file cannot be written.</exception>
public void NewImportJob(HttpListenerRequest req, HttpListenerResponse resp)
{
    //TODO: this is wrong!! internal data detected! client should not work with this
    ImportArgs args = req.ReadJson<ImportArgs>();

    var dbName = args.LogName;

    // Schema: one grouping field and one time field, typed per the request.
    // (Removed the unused fnameWithoutExtension local from the original.)
    var schema = new Dictionary<string, ImportJob.SchemaItemInfo>
    {
        [args.GroupingField] = new ImportJob.SchemaItemInfo { GroupingField = true, Type = args.GroupingFieldType },
        [args.TimeField] = new ImportJob.SchemaItemInfo { TimeField = true, Type = args.TimeFieldType }
    };

    ImportJob ij = new ImportJob
    {
        Database = new ImportJob.DatabaseInfo
        {
            ConnectionInfo = Config.Self.MongoDBConnectionInfo,
            Database = Config.Self.MongoDbPrefix + dbName,
            Table = LogDatabase.DATA_COL_NAME
        },
        Id = args.JobID,
        Input = new ImportJob.CsvInputInfo
        {
            // NOTE(review): plain string concatenation assumes FilesDir ends
            // with a path separator — confirm before switching to Path.Combine.
            CsvFileName = Config.Self.FilesDir + args.FileName,
            CsvDelimiter = args.CsvDelimiter
        },
        Schema = schema,
        TmpFolder = Config.Self.ImporterTempFolder,
        MaxMemoryUseMegabytes = Config.Self.ImporterMaxRamMegabytes
    };

    //if (JobRoute.FindJob(ij.Id) != null)
    //{
    //    throw new ApiException(409, "Job with such id already exists");
    //}

    try
    {
        var json = JsonConvert.SerializeObject(ij, Formatting.Indented);
        var path = NEW_JOBS_PATH + ij.Id.ToString() + ".job";
        File.WriteAllText(path, json);
    }
    catch (Exception e)
    {
        throw new ApiException(500, "Failed to write job file", e);
    }

    resp.Close();
}
/// <summary>
/// Progress callback for the synchronous import path. Returns false (stop
/// waiting) only when the import job record is gone AND the import handler
/// reported an error; otherwise keeps waiting.
/// </summary>
public bool OnProgressUpdate(ImportJob importJob)
{
    Logger.LogVerbose("Checking Sync Import Status");

    // De Morgan inversion of the original condition: keep waiting unless the
    // job is missing and an error was captured.
    if (importJob != null || ImportHandler.Error == null)
    {
        return true;
    }

    Logger.LogVerbose("Execute Failed and Import Job couldn't be found");
    Logger.LogVerbose("Execute Request Error: {0}", ImportHandler.Error.Message);
    return false;
}
// Wizard view model for creating an import job: registers the overview step
// followed by the column-mapping step.
public CreateImportJobViewModel(
    IViewModelsFactory<IImportJobOverviewStepViewModel> overviewVmFactory,
    IViewModelsFactory<IImportJobMappingStepViewModel> mappingVmFactory,
    ImportJob item,
    ImportEntityType[] entityImporters)
{
    // Constructor parameters shared by the step view models.
    var itemArg = new KeyValuePair<string, object>("item", item);
    var parentArg = new KeyValuePair<string, object>("parentVM", this);
    var importersArg = new KeyValuePair<string, object>("entityImporters", entityImporters);

    // Registration order defines the wizard flow.
    RegisterStep(overviewVmFactory.GetViewModelInstance(itemArg, parentArg, importersArg));
    RegisterStep(mappingVmFactory.GetViewModelInstance(itemArg, parentArg));
}
// Copies an attribute value from a foreign element graph into this graph:
// value types and strings are shared as-is; Element references are resolved
// against the local graph (recursing when the job's import mode allows);
// byte arrays are deep-copied; any other reference type is rejected.
object CopyValue(object value, ImportJob job)
{
    if (value == null)
    {
        return(null);
    }

    var attr_type = value.GetType();

    // do nothing for a value type or string...
    if (attr_type.IsValueType || attr_type == typeof(string))
    {
        return(value);
    }
    // ...copy a reference type
    else if (attr_type == typeof(Element))
    {
        var foreign_element = (Element)value;
        // NOTE(review): assumes the AllElements indexer returns null rather
        // than throwing for an unknown ID — confirm.
        var local_element = AllElements[foreign_element.ID];
        Element best_element = null;

        if (local_element != null && !local_element.Stub)
        {
            // A fully-loaded local element always wins.
            best_element = local_element;
        }
        else if (!foreign_element.Stub && job.ImportMode == ImportRecursionMode.Recursive)
        {
            // Recursive mode: import the referenced element itself, tracking
            // recursion depth on the job around the nested call.
            job.Depth++;
            best_element = ImportElement_internal(foreign_element, job);
            job.Depth--;
        }
        else
        {
            // Otherwise fall back to the local stub if present, else create a
            // new stub in Stubs mode, else null.
            best_element = local_element ?? (job.ImportMode == ImportRecursionMode.Stubs ? new Element(this, foreign_element.ID) : (Element)null);
        }
        return(best_element);
    }
    else if (attr_type == typeof(byte[]))
    {
        // Deep-copy byte arrays so the foreign buffer cannot be mutated
        // through the local graph.
        var inbytes = (byte[])value;
        var outbytes = new byte[inbytes.Length];
        inbytes.CopyTo(outbytes, 0);
        return(outbytes);
    }
    else
    {
        throw new ArgumentException("CopyValue: unhandled type.");
    }
}
/// <summary>
/// Streams rows from the CSV reader through the configured entity importer,
/// tracking progress and collecting per-row error messages. Changes are
/// committed only when the error count stays below the job's limit; otherwise
/// they are rolled back and the loop stops.
/// </summary>
private void Import(ImportJob job, CsvReader reader, ImportResult result)
{
    Dictionary<string, int> csvNames = GetCsvNamesAndIndexes(reader);
    ColumnMapping columnMapping = ColumnMappings.First(x => x.ColumnMappingId == job.ColumnMappingId);

    IEntityImporter importer = _entityImporters.FirstOrDefault(i => i.Name == job.EntityImporter);
    // Fail fast with a clear message: the original dereferenced a possibly-null
    // importer inside the loop, turning a configuration error into one
    // NullReferenceException per row until the error limit tripped.
    if (importer == null)
    {
        throw new InvalidOperationException("Unknown entity importer: " + job.EntityImporter);
    }

    while (true)
    {
        try
        {
            string[] csvValues = reader.ReadRow();
            if (csvValues == null)
            {
                break; // end of file
            }

            var systemValues = MapColumns(columnMapping.SystemPropertiesMap, csvNames, csvValues);
            var customValues = MapColumns(columnMapping.CustomPropertiesMap, csvNames, csvValues);
            importer.Import(job.ContainerId, columnMapping.PropertySetId, systemValues, customValues, _catalogRepository);

            result.CurrentProgress = reader.CurrentPosition;
            result.ProcessedRecordsCount++;
        }
        catch (Exception e)
        {
            result.ErrorsCount++;
            if (result.Errors == null)
            {
                result.Errors = new List<string>();
            }
            result.Errors.Add(e.Message);

            //check if errors amount reached the allowed errors limit if yes do not save made changes.
            if (result.ErrorsCount >= job.MaxErrorsCount)
            {
                _catalogRepository.UnitOfWork.RollbackChanges();
                break;
            }
        }
    }

    if (result.ErrorsCount < job.MaxErrorsCount)
    {
        _catalogRepository.UnitOfWork.Commit();
    }
}
/// <summary>
/// Bulk-inserts the imported members for the job's tenant through the
/// [IQ].[ImportUsers] stored procedure. Logs a warning and does nothing when
/// the list is empty.
/// </summary>
private async Task ImportUsers(ImportJob importJob, List<MemberImportModel> dataList, CancellationToken cancellationToken)
{
    if (dataList.Count == 0)
    {
        _logger.LogWarning("No data to import");
        return;
    }

    // Table-valued parameter carrying all members in one round trip.
    var userTable = dataList.ToDataTable();

    await _dataSession
        .StoredProcedure("[IQ].[ImportUsers]")
        .SqlParameter("@userTable", userTable)
        .Parameter("@tenantId", importJob.TenantId)
        .Parameter("@roleName", Data.Constants.Role.MemberName)
        .ExecuteAsync(cancellationToken);
}
/// <summary>
/// Sends an import-outcome notification to the job's creator via the mediator.
/// </summary>
/// <param name="importJob">Job whose tenant/creator metadata stamps the notification.</param>
/// <param name="message">Human-readable outcome text.</param>
/// <param name="cancellationToken">Propagated to the mediator.</param>
private async Task SendNotification(ImportJob importJob, string message, CancellationToken cancellationToken)
{
    var createModel = new NotificationCreateModel
    {
        Created = DateTimeOffset.UtcNow,
        CreatedBy = importJob.CreatedBy,
        Updated = DateTimeOffset.UtcNow,
        UpdatedBy = importJob.CreatedBy,
        TenantId = importJob.TenantId,
        UserName = importJob.CreatedBy,
        Type = importJob.Type,
        Message = message
    };

    var command = new EntityCreateCommand<NotificationCreateModel, NotificationReadModel>(null, createModel);

    // The created notification is not used here — just await completion.
    // (Removed the unused "result" local from the original.)
    await _mediator.Send(command, cancellationToken);
}
// Persists pending edits: pushes the selected importer name onto the item,
// commits the unit of work, and resets modification state in dialog mode.
private void DoSaveChanges()
{
    var selectedName = SelectedEntityImporter.Name;

    // Assign only on an actual change to avoid a redundant PropertyChanged.
    if (InnerItem.EntityImporter != selectedName)
    {
        InnerItem.EntityImporter = selectedName;
    }

    _itemRepository.UnitOfWork.Commit();
    _originalItem = InnerItem;

    OnPropertyChanged("DisplayName");

    if (IsSingleDialogEditing)
    {
        IsModified = false;
    }
}
// Verifies that a full command line (state, year, output/working folders,
// variables file, export flag) parses into the expected job properties.
public void TestFullLine()
{
    string[] args = "-s Wyoming -y 2010 -outputFolder \"c:\\sandbox\\ACSDataErmine\\\" -workingFolder \"c:\\sandbox\\ACSDataErmine\\Wo-rking\\\" -v test_vars.txt -exportToShape".Split(' ');
    ImportJob job = new ImportJob();
    if (!job.Load(args))
    {
        Assert.Fail("Couldn't parse standard line for full line");
    }

    Assert.AreEqual(AcsState.Wyoming, job.State, "State is wrong for args");
    Assert.AreEqual("test_vars.txt", job.IncludedVariableFile, "variables file is wrong for args");
    Assert.AreEqual("c:\\sandbox\\ACSDataErmine\\", job.OutputFolder, "Output folder is wrong!");
    // WorkingFolder deliberately contains a hyphen to exercise quoted-path parsing.
    Assert.AreEqual("c:\\sandbox\\ACSDataErmine\\Wo-rking\\", job.WorkingFolder, "Working folder is wrong!");
    // NOTE(review): true.ToString() is passed both as the expected value and as
    // an extra argument after the actual — if ExportToShapefile is a bool this
    // compares string to bool and the extra argument is swallowed as a message
    // format arg; confirm the intended overload.
    Assert.AreEqual(true.ToString(), job.ExportToShapefile, true.ToString(), "flag param is wrong for args");
}
// Base constructor for the import-job editor view model: resolves services
// from the container and wires all commands. In single-dialog mode it also
// wires the open/save/cancel/minimize shell commands.
protected ImportJobViewModelBack(IUnityContainer container, ICatalogEntityFactory entityFactory, ImportJob item, WizardViewModelBare parentVM, bool isSingleDialogEditing)
{
    Container = container;
    _entityFactory = entityFactory;
    // InnerItem and _originalItem start as the same instance; InitializeForOpen
    // later replaces InnerItem with an editable clone.
    InnerItem = _originalItem = item;
    _parentViewModel = parentVM;
    IsSingleDialogEditing = isSingleDialogEditing;

    _importService = Container.Resolve<IImportService>();
    _itemRepository = Container.Resolve<IImportRepository>();

    if (isSingleDialogEditing)
    {
        _originalItem = InnerItem;
        // Navigates the docked editor to this item.
        OpenItemCommand = new DelegateCommand(() =>
        {
            var navigationmanager = container.Resolve<NavigationManager>();
            NavigationData = new NavigationItem(InnerItem.ImportJobId, NavigationNames.HomeName, NavigationNames.MenuName, this);
            navigationmanager.Navigate(NavigationData);
        });
        CancelCommand = new DelegateCommand<object>(OnCancelCommand);
        // Save is enabled only while the item has unsaved modifications.
        SaveChangesCommand = new DelegateCommand<object>((x) => OnSaveChangesCommand(), (x) => { return(IsModified); });
        MinimizeCommand = new DelegateCommand(() => MinimizableViewRequestedEvent(this, null));
    }
    else
    {
        InitializePropertiesForViewing();
    }

    FilePickCommand = new DelegateCommand(RaiseFilePickInteractionRequest);
    CreateMappingCommand = new DelegateCommand(RaiseCreateMappingInteractionRequest);

    CancelConfirmRequest = new InteractionRequest<Confirmation>();
    CommonConfirmRequest = new InteractionRequest<Confirmation>();
    CommonConfirmRequest2 = new InteractionRequest<Confirmation>();

    UpdateImporterCommand = new DelegateCommand<EntityImporterBase>((x) => OnImporterChangesCommand(x));
    UpdatePropertySetCommand = new DelegateCommand<PropertySet>((x) => OnPropertySetChangesCommand(x));
    CatalogChangedCommand = new DelegateCommand<CatalogBase>((x) => OnCatalogChangesCommand(x));
    // Edit/clear are enabled only when a mapping item is selected.
    ItemEditCommand = new DelegateCommand<MappingItem>((x) => RaiseItemEditInteractionRequest(x), x => x != null);
    ItemClearCommand = new DelegateCommand<MappingItem>((x) => RaiseItemClearInteractionRequest(x), x => x != null);
}
/// <summary>
/// Validates the command-line options and builds a configured
/// <see cref="ImportJob"/> for SSRS object import.
/// </summary>
/// <returns>The ready-to-run import job.</returns>
/// <exception cref="InvalidArgumentsException">Any option is missing or invalid.</exception>
public IJob CreateJob()
{
    if (String.IsNullOrWhiteSpace(SsrsUriString))
    {
        throw new InvalidArgumentsException("No SSRS URI specified.");
    }
    if (!Uri.TryCreate(SsrsUriString, UriKind.Absolute, out _))
    {
        throw new InvalidArgumentsException($"Not a valid absolute URI: {SsrsUriString}");
    }

    var source = GetObjectSource();
    if (!source.HasManifest && Site != null)
    {
        throw new InvalidArgumentsException("--site was specified but no manifest was found.");
    }

    // Single parser instance for both validation and conversion — the original
    // constructed SsrsObjectTypesParser twice for no reason.
    var typesParser = new SsrsObjectTypesParser();
    if (!typesParser.ValidateTypes(ObjectFilter.TypeFilter, out var invalidTypes))
    {
        throw new InvalidArgumentsException($"Invalid object types: {invalidTypes}");
    }

    var filter = new SsrsObjectFilter
    {
        ObjectTypes = typesParser.GetTypeFilter(ObjectFilter.TypeFilter),
        Path = new PathFilterExpression(
            PathFilter.ParseGlob(ObjectFilter.IncludePaths) ?? PathFilter.MatchAll,
            PathFilter.ParseGlob(ObjectFilter.ExcludePaths)),
        Site = Site
    };

    var ssrsUri = new Uri(SsrsUriString, UriKind.Absolute);
    var service = ReportingServiceClientFactory.CreateFromShorthandUri(ssrsUri);

    var job = new ImportJob(service, source, filter)
    {
        Overwrite = Overwrite,
        BackupTarget = GetBackupTarget()
    };

    // Parse rewrite rules up front so syntax errors surface before the job runs.
    foreach (var rule in RewriteRules)
    {
        var rewriter = new RewriteRuleParser().Parse(rule);
        job.Rewriters.Add(rewriter);
    }

    return job;
}
/// <summary>
/// Logs import-job progress while the surrounding async operation is still in
/// progress; all other async states are ignored.
/// </summary>
public void OnProgressUpdate(AsyncOperation asyncOperation)
{
    // Early return replaces the original's wrapping if-block.
    if (asyncOperation.StatusCode.Value != (int)AsyncOperation_StatusCode.InProgress)
    {
        return;
    }

    var jobManager = new ImportJobManager(Logger, OrganizationService);
    var importJob = jobManager.GetImportJob(ImportJobId, new ColumnSet("importjobid", "completedon", "progress"));

    if (importJob == null)
    {
        Logger.LogVerbose("Import job not found with Id: {0}", ImportJobId);
    }
    else
    {
        Logger.LogVerbose("Import Job Progress: {0}", importJob.Progress);
    }
}
// Plugin control bootstrap: builds the dock layout (data tree right, timeline
// bottom, solution progress as the document) and binds toolbar button state to
// the view model.
public SolutionManagerPluginControl()
{
    InitializeComponent();

    var importJobRepository = ImportJob.GetRepository();
    this.scheduler = new AsyncJobScheduler(this, this.viewModel);

    this.dataTreeView = new Views.DataTree.DataTreeView(this.viewModel);
    this.dataTreeView.Show(this.dockPanel, DockState.DockRight);

    this.timelineView = new Views.Timeline.TimelineView();
    this.timelineView.Show(this.dockPanel, DockState.DockBottom);

    // Progress view depends on the scheduler, repository and timeline created
    // above — construction order matters.
    this.solutionProgressView = new Views.SolutionProgress.SolutionProgressView(this.scheduler, importJobRepository, this.viewModel, this.timelineView);
    this.solutionProgressView.Show(this.dockPanel, DockState.Document);

    // Toolbar buttons follow the view model's monitoring state via data binding.
    this.tStartMonitoring.DataBindings.Add(nameof(this.tStartMonitoring.Enabled), this.viewModel, nameof(this.viewModel.CanStartMonitoring));
    this.tStopMonitoring.DataBindings.Add(nameof(this.tStopMonitoring.Enabled), this.viewModel, nameof(this.viewModel.CanStopMonitoring));
    this.tStoppingLabel.DataBindings.Add(nameof(this.tStoppingLabel.Visible), this.viewModel, nameof(this.viewModel.StopMonitoringRequested));
}
/// <summary>
/// Loads and executes the member import job identified by <paramref name="id"/>,
/// then notifies the requester of the outcome. Failures after the job record
/// was loaded produce an error notification and re-throw; failures before it
/// propagate directly.
/// </summary>
public async Task ImportMembersAsync(Guid id, CancellationToken cancellationToken = default(CancellationToken))
{
    ImportJob importJob = null;
    try
    {
        importJob = await _context.ImportJobs.FindAsync(id);
        if (importJob == null)
        {
            _logger.LogError("Invalid import job identifier: {id}", id);
            return;
        }

        var importModel = JsonConvert.DeserializeObject<MemberImportJobModel>(importJob.MappingJson);

        var dataList = await LoadData(importJob, importModel, cancellationToken);
        await ImportUsers(importJob, dataList, cancellationToken);

        var successMessage = $"Processed member import request '{importModel.Name}'; Imported: {dataList.Count}";
        await SendNotification(importJob, successMessage, cancellationToken);
    }
    catch (Exception ex)
    {
        _logger.LogError(ex, "Error processing import '{id}': {message}", id, ex.Message);

        // No job record loaded means no one to notify — just propagate.
        if (importJob == null)
        {
            throw;
        }

        var failureMessage = $"Error Processing member import request; {ex.GetBaseException().Message}";
        await SendNotification(importJob, failureMessage, cancellationToken);
        throw;
    }
}
/// <summary>
/// Prepares the view model for opening: loads the full entity, clones it for
/// editing, attaches change tracking, and seeds the importer selection and
/// CSV column list.
/// </summary>
public void InitializeForOpen()
{
    // initialize if needed — InnerItem == _originalItem means the item has not
    // been cloned for editing yet (first open).
    if (InnerItem == _originalItem)
    {
        IsInitializing = true;
        DetachEventListeners();

        // Load complete item here
        _originalItem = LoadItem(_originalItem.ImportJobId);
        InnerItem = _originalItem.DeepClone(_entityFactory as IKnownSerializationTypes);

        // current workaround for repository crash
        //ItemRepository = Container.Resolve<IImportRepository>();
        _itemRepository.Attach(InnerItem);

        InitializePropertiesForViewing();

        // Fall back to the first available importer when the stored name is
        // null OR no longer registered — the original First(predicate) threw
        // InvalidOperationException for a stale importer name.
        SelectedEntityImporter =
            AllAvailableEntityImporters.FirstOrDefault(x => x.Name == InnerItem.EntityImporter)
            ?? AllAvailableEntityImporters.First();

        CsvFileColumns = GetCsvColumns(InnerItem.TemplateId);

        // Subscribe last so the initialization above does not mark the item
        // as modified.
        InnerItem.PropertyChanged += CurrentItem_PropertyChanged;
    }
}
// Verifies that argument parsing tolerates an en dash (U+2013, (char)8211) in
// place of '-' — a common artifact of copy-pasting command lines from word
// processors — with the en dash placed on a different switch in each variant.
public void TestStandardLine()
{
    var argsList = new string[][]
    {
        ("-s Wyoming -e 150 -v my-VariablesFile.txt -jobName Test-01 " + (char)8211 + "exportToShape").Split(' '),
        ("-s Wyoming -e 150 " + (char)8211 + "v my-VariablesFile.txt -jobName Test-01 -exportToShape").Split(' '),
        ((char)8211 + "s Wyoming -e 150 -v my-VariablesFile.txt -jobName Test-01 -exportToShape").Split(' ')
    };

    for (int i = 0; i < argsList.Length; i++)
    {
        var args = argsList[i];
        ImportJob job = new ImportJob();
        if (!job.Load(args))
        {
            Assert.Fail("Couldn't parse standard line for argsList[{0}]", i);
        }
        Assert.AreEqual(AcsState.Wyoming, job.State, "State is wrong for argsList[{0}]", i);
        Assert.AreEqual("my-VariablesFile.txt", job.IncludedVariableFile, "variables file is wrong for argsList[{0}]", i);
        Assert.AreEqual("Test-01", job.JobName, "Job name is wrong for argsList[{0}]", i);
        // NOTE(review): true.ToString() appears both as the expected value and
        // as an extra trailing argument; if ExportToShapefile is a bool this is
        // a string-vs-bool comparison — confirm the intended Assert overload.
        Assert.AreEqual(true.ToString(), job.ExportToShapefile, true.ToString(), "flag param is wrong for argsList[{0}]", i);
    }
}
/// <summary>
/// Persists a new "MemberImport" job record describing an uploaded file and
/// its serialized column mapping.
/// </summary>
private async Task SaveImportJob(Guid id, string storageFile, MemberImportUploadCommand request, MemberImportJobModel importModel, CancellationToken cancellationToken)
{
    var mappingJson = JsonConvert.SerializeObject(importModel);

    // create job stamped with the requesting user's identity (may be null for
    // anonymous principals).
    var identityName = request.Principal?.Identity?.Name;

    var importJob = new ImportJob
    {
        Id = id,
        Type = "MemberImport",
        TenantId = request.TenantId,
        StorageFile = storageFile,
        MappingJson = mappingJson,
        Created = DateTimeOffset.UtcNow,
        CreatedBy = identityName,
        Updated = DateTimeOffset.UtcNow,
        UpdatedBy = identityName
    };

    DataContext.ImportJobs.Add(importJob);
    await DataContext.SaveChangesAsync(cancellationToken);
}
/// <summary>
/// Streams rows from the CSV reader through the configured entity importer,
/// tracking progress and collecting per-row error messages. Changes are
/// committed only when the error count stays below the job's limit; otherwise
/// they are rolled back and the loop stops.
/// </summary>
private void Import(ImportJob job, CsvReader reader, ImportResult result)
{
    Dictionary<string, int> csvNames = GetCsvNamesAndIndexes(reader);
    ColumnMapping columnMapping = ColumnMappings.First(x => x.ColumnMappingId == job.ColumnMappingId);

    IEntityImporter importer = _entityImporters.FirstOrDefault(i => i.Name == job.EntityImporter);
    // Fail fast with a clear message: the original dereferenced a possibly-null
    // importer inside the loop, turning a configuration error into one
    // NullReferenceException per row until the error limit tripped.
    if (importer == null)
    {
        throw new InvalidOperationException("Unknown entity importer: " + job.EntityImporter);
    }

    while (true)
    {
        try
        {
            string[] csvValues = reader.ReadRow();
            if (csvValues == null)
            {
                break; // end of file
            }

            var systemValues = MapColumns(columnMapping.SystemPropertiesMap, csvNames, csvValues);
            var customValues = MapColumns(columnMapping.CustomPropertiesMap, csvNames, csvValues);
            importer.Import(job.ContainerId, columnMapping.PropertySetId, systemValues, customValues, _catalogRepository);

            result.CurrentProgress = reader.CurrentPosition;
            result.ProcessedRecordsCount++;
        }
        catch (Exception e)
        {
            result.ErrorsCount++;
            if (result.Errors == null)
            {
                result.Errors = new List<string>();
            }
            result.Errors.Add(e.Message);

            //check if errors amount reached the allowed errors limit if yes do not save made changes.
            if (result.ErrorsCount >= job.MaxErrorsCount)
            {
                _catalogRepository.UnitOfWork.RollbackChanges();
                break;
            }
        }
    }

    if (result.ErrorsCount < job.MaxErrorsCount)
    {
        _catalogRepository.UnitOfWork.Commit();
    }
}
/// <summary>
/// Polls CRM for the import job until it completes, the status callback asks
/// to stop, or the timeout elapses.
/// </summary>
/// <param name="importJobId">Id of the importjob record to poll.</param>
/// <param name="asyncWaitTimeout">Overall timeout, in seconds.</param>
/// <param name="sleepInterval">Delay between polls, in seconds.</param>
/// <param name="waitIfNotFound">Keep polling when the record does not exist yet.</param>
/// <param name="statusUpdate">Optional observer; returning false stops the wait.</param>
/// <returns>The last retrieved import job, or null if it was never found.</returns>
/// <exception cref="Exception">The overall timeout elapsed.</exception>
public ImportJob AwaitImportJob(
    Guid importJobId,
    int asyncWaitTimeout,
    int sleepInterval,
    bool waitIfNotFound,
    IJobStatusUpdate statusUpdate)
{
    DateTime end = DateTime.Now.AddSeconds(asyncWaitTimeout);
    ImportJob importJob = null;
    bool completed = false;

    while (!completed)
    {
        if (end < DateTime.Now)
        {
            throw new Exception(string.Format("Import Timeout Exceeded: {0}", asyncWaitTimeout));
        }

        Logger.LogVerbose(string.Format("Sleeping for {0} seconds", sleepInterval));
        Thread.Sleep(sleepInterval * 1000);

        try
        {
            importJob = GetImportJob(importJobId, new ColumnSet("importjobid", "completedon", "progress"));
            if (importJob == null)
            {
                Logger.LogVerbose("Unable to find Import Job with Id {0}", importJobId);
                // Missing record still flows into the statusUpdate call below.
                if (!waitIfNotFound)
                {
                    completed = true;
                }
            }
            else
            {
                Logger.LogVerbose("Import Progress: {0}", importJob.Progress);
                if (importJob.CompletedOn.HasValue)
                {
                    Logger.LogVerbose("Completed On: {0}", importJob.CompletedOn);
                    // Done — exit immediately without consulting statusUpdate
                    // (redundant "completed = true" before break removed).
                    break;
                }
            }

            if (statusUpdate != null)
            {
                bool continueWaiting = statusUpdate.OnProgressUpdate(importJob);
                if (!continueWaiting)
                {
                    // Typo fixed in log message: "Existing" -> "Exiting".
                    Logger.LogVerbose("continueWaiting = false. Exiting loop.");
                    completed = true;
                }
            }
        }
        catch (Exception ex)
        {
            // Transient retrieve failures are logged and retried on the next
            // poll; the timeout check above bounds the total wait.
            Logger.LogVerbose(ex.Message);
        }
    }

    return importJob;
}
// Runs the selected import actions (site content, assets, catalog) on a
// background task, reporting progress through _progressMessenger and exposing
// completion/failure via IsDone/IsFailed. Re-entry is ignored while an import
// is already running.
public void Import(ImportAction action)
{
    if (IsImporting)
    {
        return;
    }

    // NOTE(review): the returned contact is never used — presumably called for
    // a side effect of touching CustomerContext; confirm before removing.
    var customer = CustomerContext.Current.GetContactById(Guid.Empty);

    // instantiate the import job here so that it can capture the current EventContext instance.
    var importJob = new ImportJob(AppContext.Current.ApplicationId, CatalogPackagePath, "Catalog.xml", true);

    Action importAction = () =>
    {
        IsImporting = true;
        try
        {
            // Each action is a flag in the [Flags] ImportAction enum; the
            // percentage arguments form a rough overall progress scale.
            if ((action & ImportAction.SiteContent) == ImportAction.SiteContent)
            {
                _progressMessenger.AddProgressMessageText("Importing Site content...", false, 0);
                doImportEpiData(SiteContentPath);
                _progressMessenger.AddProgressMessageText("Done importing Site content.", false, 5);
            }

            if ((action & ImportAction.AssetContent) == ImportAction.AssetContent)
            {
                _progressMessenger.AddProgressMessageText("Importing Asset content...", false, 10);
                doImportEpiData(AssetPath);
                _progressMessenger.AddProgressMessageText("Done importing Asset content.", false, 15);

                //Reindex the asset contents to make them searchable
                _progressMessenger.AddProgressMessageText("Start indexing asset contents.", false, 15);
                ServiceLocator.Current.GetInstance<ReIndexManager>().ReIndex();
                _progressMessenger.AddProgressMessageText("Done indexing asset contents.", false, 20);
            }

            #region Import catalog and asset mapping
            if ((action & ImportAction.CatalogContent) == ImportAction.CatalogContent)
            {
                _progressMessenger.AddProgressMessageText("Importing Catalog content...", false, 20);

                // Relay background-task messages to the progress messenger.
                Action<IBackgroundTaskMessage> addMessage = msg =>
                {
                    var isError = msg.MessageType == BackgroundTaskMessageType.Error;
                    var percent = (int)Math.Round(msg.GetOverallProgress() * 100);
                    var message = msg.Exception == null ?
                        msg.Message : string.Format("{0} {1}", msg.Message, msg.ExceptionMessage);
                    _progressMessenger.AddProgressMessageText(message, isError, percent);
                };
                importJob.Execute(addMessage, CancellationToken.None);
                _progressMessenger.AddProgressMessageText("Done importing Catalog content", false, 60);

                //We are running in front-end site context, the metafield update events are ignored, we need to sync manually
                _progressMessenger.AddProgressMessageText("Syncing metaclasses with content types", false, 60);
                SyncMetaClassesToContentTypeModels();
                _progressMessenger.AddProgressMessageText("Done syncing metaclasses with content types", false, 70);

                _progressMessenger.AddProgressMessageText("Rebuilding index...", false, 70);
                BuildIndex(_progressMessenger, AppContext.Current.ApplicationId, AppContext.Current.ApplicationName, true);
                _progressMessenger.AddProgressMessageText("Done rebuilding index", false, 90);
            }
            #endregion

            _progressMessenger.SetProgressDone();
            IsDone = true;
        }
        catch (Exception ex)
        {
            // Surface the failure to the progress UI, flag it, log it, and
            // rethrow so the task faults as well.
            var error = ex.Message + "<br />" + ex.StackTrace;
            _progressMessenger.AddProgressMessageText(error, true, 0);
            _progressMessenger.SetProgressFailed();
            IsFailed = true;
            _log.Error("Import failed");
            _log.Error(ex);
            throw;
        }
    };

    // Fire-and-forget on the thread pool; callers observe progress via the
    // messenger and the IsDone/IsFailed flags.
    Task.Factory.StartNew(importAction);
}
/// <summary>
/// Inspects the import job record to determine whether a solution import
/// succeeded, extracting the result flag, error text and unprocessed-component
/// count from the job's result XML.
/// </summary>
private SolutionImportResult VerifySolutionImport(
    bool importAsync,
    Guid importJobId,
    AsyncOperation asyncOperation,
    Exception syncImportException)
{
    var result = new SolutionImportResult();

    Logger.LogVerbose("Verifying Solution Import");

    var jobManager = new ImportJobManager(Logger, OrganizationService);
    ImportJob importJob = jobManager.GetImportJob(
        importJobId, new ColumnSet("importjobid", "completedon", "progress", "data"));

    // Early return: without the job record the only error detail available is
    // the async operation message (async path) or the exception (sync path).
    if (importJob == null)
    {
        result.ImportJobAvailable = false;
        result.ErrorMessage = importAsync
            ? (asyncOperation != null ? asyncOperation.Message : "")
            : (syncImportException != null ? syncImportException.Message : "");
        Logger.LogError("Can't verify as import job couldn't be found. Error Message: {0}", result.ErrorMessage);
        return result;
    }

    result.ImportJobAvailable = true;

    // 100% progress logs at information level; anything less is a warning.
    if (importJob.Progress == 100)
    {
        Logger.LogInformation("Completed Progress: {0}", importJob.Progress);
    }
    else
    {
        Logger.LogWarning("Completed Progress: {0}", importJob.Progress);
    }
    Logger.LogInformation("Completed On: {0}", importJob.CompletedOn);

    // The job's Data column carries the solution manifest result XML.
    var doc = new XmlDocument();
    doc.LoadXml(importJob.Data);

    XmlNode resultNode = doc.SelectSingleNode("//solutionManifest/result/@result");
    String solutionImportResult = resultNode != null ? resultNode.Value : null;
    Logger.LogInformation("Import Result: {0}", solutionImportResult);

    XmlNode errorNode = doc.SelectSingleNode("//solutionManifest/result/@errortext");
    String solutionImportError = errorNode != null ? errorNode.Value : null;
    Logger.LogInformation("Import Error: {0}", solutionImportError);
    result.ErrorMessage = solutionImportError;

    XmlNodeList unprocessedNodes = doc.SelectNodes("//*[@processed=\"false\"]");
    result.UnprocessedComponents = unprocessedNodes.Count;
    if (unprocessedNodes.Count > 0)
    {
        Logger.LogWarning("Total number of unprocessed components: {0}", unprocessedNodes.Count);
    }
    else
    {
        Logger.LogInformation("Total number of unprocessed components: {0}", unprocessedNodes.Count);
    }

    if (solutionImportResult == ImportSuccess)
    {
        result.Success = true;
    }

    return result;
}
// View model for editing an import job outside the wizard. Wires navigation,
// save/cancel, mapping and file-pick commands; in single-dialog mode it also
// exposes open/minimize commands for the docked-editor shell.
public ImportJobViewModelNew(
    NavigationManager navManager,
    ICatalogEntityFactory entityFactory,
    ICatalogRepository catalogRepository,
    IImportRepository importRepository,
    ImportJob item,
    WizardViewModelBare parentVM,
    IColumnMappingEditViewModel mappingEditVM,
    IColumnMappingViewModel columnMappingVM,
    IImportService importService,
    IPickAssetViewModel pickAssetVM,
    bool isSingleDialogEditing)
{
    _entityFactory = entityFactory;
    // InnerItem and _originalItem start as the same instance; InitializeForOpen
    // later replaces InnerItem with an editable clone.
    InnerItem = _originalItem = item;
    _parentViewModel = parentVM;
    IsSingleDialogEditing = isSingleDialogEditing;
    _itemRepository = importRepository;
    _mappingEditVM = mappingEditVM;
    _columnMappingVM = columnMappingVM;
    _pickAssetVM = pickAssetVM;
    _importService = importService;
    _catalogRepository = catalogRepository;

    ViewTitle = new ViewTitleBase() { Title = "Import job", SubTitle = DisplayName.ToUpper() };

    if (isSingleDialogEditing)
    {
        _originalItem = InnerItem;
        // Navigates the docked editor to this item.
        OpenItemCommand = new DelegateCommand(() =>
        {
            NavigationData = new NavigationItem(InnerItem.ImportJobId, NavigationNames.HomeName, NavigationNames.MenuName, this);
            navManager.Navigate(NavigationData);
        });
        CancelCommand = new DelegateCommand<object>(OnCancelCommand);
        // Save is enabled only while the item has unsaved modifications.
        SaveChangesCommand = new DelegateCommand<object>((x) => OnSaveChangesCommand(), (x) => { return(IsModified); });
        MinimizeCommand = new DelegateCommand(() => MinimizableViewRequestedEvent(this, null));
    }
    else
    {
        InitializePropertiesForViewing();
    }

    FilePickCommand = new DelegateCommand(RaiseFilePickInteractionRequest);
    CreateMappingCommand = new DelegateCommand(RaiseCreateMappingInteractionRequest);

    CancelConfirmRequest = new InteractionRequest<Confirmation>();
    CommonConfirmRequest = new InteractionRequest<Confirmation>();
    CommonConfirmRequest2 = new InteractionRequest<Confirmation>();

    UpdateImporterCommand = new DelegateCommand<EntityImporterBase>((x) => OnImporterChangesCommand(x));
    UpdatePropertySetCommand = new DelegateCommand<PropertySet>((x) => OnPropertySetChangesCommand(x));
    CatalogChangedCommand = new DelegateCommand<CatalogBase>((x) => OnCatalogChangesCommand(x));
    // Edit/clear are enabled only when a mapping item is selected.
    ItemEditCommand = new DelegateCommand<MappingItem>((x) => RaiseItemEditInteractionRequest(x), x => x != null);
    ItemClearCommand = new DelegateCommand<MappingItem>((x) => RaiseItemClearInteractionRequest(x), x => x != null);
}
/// <summary>
/// Imports catalog content from "Catalog.xml" under <paramref name="path"/>
/// and then syncs metaclasses with the content type models. Progress
/// reporting is currently disabled (messenger calls are commented out).
/// </summary>
private void ImportCatalog(string path)
{
    var job = new ImportJob(AppContext.Current.ApplicationId, path, "Catalog.xml", true);

    // Progress values are still computed but intentionally not forwarded
    // anywhere while the progress messenger is disabled.
    Action<IBackgroundTaskMessage> relayMessage = msg =>
    {
        var isError = msg.MessageType == BackgroundTaskMessageType.Error;
        var percent = (int)Math.Round(msg.GetOverallProgress() * 100);
        var message = msg.Exception == null
            ? msg.Message
            : string.Format("{0} {1}", msg.Message, msg.ExceptionMessage);
        //_progressMessenger.AddProgressMessageText(message, isError, percent);
    };

    job.Execute(relayMessage, CancellationToken.None);

    //We are running in front-end site context, the metafield update events are ignored, we need to sync manually
    SyncMetaClassesToContentTypeModels();
}