/// <summary>
/// Copies the current export progress snapshot onto the push notification
/// so connected clients see up-to-date counters and errors.
/// </summary>
public static void Patch(this ExportPushNotification target, ExportProgressInfo source)
{
    target.ProcessedCount = source.ProcessedCount;
    target.TotalCount = source.TotalCount;
    target.Description = source.Description;
    target.Errors = source.Errors;
}
/// <summary>
/// Guard clause: rejects a null push notification before the job starts.
/// </summary>
private static void ValidateParameters(ExportPushNotification pushNotification)
{
    if (pushNotification is null)
    {
        throw new ArgumentNullException(nameof(pushNotification));
    }
}
/// <summary>
/// Hangfire job entry point: runs the customer data export, streaming progress
/// to the client through <paramref name="pushNotification"/>.
/// </summary>
public async Task ExportBackgroundAsync(ExportDataRequest request, ExportPushNotification pushNotification, IJobCancellationToken jobCancellationToken, PerformContext context)
{
    ValidateParameters(pushNotification);

    try
    {
        await _customerDataExporter.ExportAsync(
            request,
            progressInfo => ProgressCallback(progressInfo, pushNotification, context),
            new JobCancellationTokenWrapper(jobCancellationToken));
    }
    catch (JobAbortedException)
    {
        // Job was cancelled externally; the finally block still publishes the final state.
    }
    catch (Exception ex)
    {
        pushNotification.Errors.Add(ex.ExpandExceptionMessage());
    }
    finally
    {
        pushNotification.Description = "Export finished";
        pushNotification.Finished = DateTime.UtcNow;
        await _pushNotificationManager.SendAsync(pushNotification);
    }
}
/// <summary>
/// Authorizes the request against the export-type-specific policy, then enqueues
/// the export background job and returns the tracking push notification.
/// </summary>
public async Task<ActionResult<PlatformExportPushNotification>> RunExport([FromBody] ExportDataRequest request)
{
    var policyName = request.ExportTypeName + "ExportDataPolicy";
    var authorizationResult = await _authorizationService.AuthorizeAsync(User, request.DataQuery, policyName);
    if (!authorizationResult.Succeeded)
    {
        return Unauthorized();
    }

    // Use only the short type name (text after the last dot) in the notification title.
    var lastDot = request.ExportTypeName.LastIndexOf('.');
    var typeTitle = lastDot > 0 ? request.ExportTypeName.Substring(lastDot + 1) : request.ExportTypeName;

    var notification = new ExportPushNotification(_userNameResolver.GetCurrentUserName())
    {
        NotifyType = "PlatformExportPushNotification",
        Title = $"{typeTitle} export",
        Description = "Starting export task..."
    };
    _pushNotificationManager.Send(notification);

    var jobId = BackgroundJob.Enqueue<ExportJob>(x => x.ExportBackgroundAsync(request, notification, JobCancellationToken.Null, null));
    notification.JobId = jobId;

    return Ok(notification);
}
/// <summary>
/// Copies the export progress snapshot — counters, description and generated file
/// URLs — onto the push notification shown to the client.
/// </summary>
public static void Patch(this ExportPushNotification target, ExportProgressInfo source)
{
    target.ProcessedCount = source.ProcessedCount;
    target.TotalCount = source.TotalCount;
    target.Description = source.Description;
    target.ContactsFileUrl = source.ContactsFileUrl;
    target.OrganizationsFileUrl = source.OrganizationsFileUrl;
}
/// <summary>
/// Hangfire job entry point: exports the data described by <paramref name="request"/>
/// into a temp file under the default export folder and publishes the relative
/// download URL through <paramref name="notification"/>.
/// </summary>
public void ExportBackground(ExportDataRequest request, ExportPushNotification notification, IJobCancellationToken cancellationToken, PerformContext context)
{
    // Relays intermediate exporter progress to the client.
    void progressCallback(ExportProgressInfo x)
    {
        notification.Patch(x);
        notification.JobId = context.BackgroundJob.Id;
        _pushNotificationManager.Upsert(notification);
    }

    try
    {
        var localTmpFolder = HostingEnvironment.MapPath(_defaultExportFolder);

        // BUGFIX: build the (folder-less) file name exactly once.
        // The original combined the folder into fileName up front and re-evaluated
        // DateTime.UtcNow in the ChangeExtension branch, so the local path and the
        // download URL could refer to two different names — and when the provider
        // had no file extension, DownloadUrl embedded the full local path.
        var fileName = string.Format(FileNameTemplate, DateTime.UtcNow);

        // Do not like provider creation here to get file extension, maybe need to pass created provider to Exporter.
        // Creating the stream inside the exporter is not good either — it is not its responsibility to decide where to write.
        var provider = _exportProviderFactory.CreateProvider(request);
        if (!string.IsNullOrEmpty(provider.ExportedFileExtension))
        {
            fileName = Path.ChangeExtension(fileName, provider.ExportedFileExtension);
        }

        var localTmpPath = Path.Combine(localTmpFolder, fileName);

        if (!Directory.Exists(localTmpFolder))
        {
            Directory.CreateDirectory(localTmpFolder);
        }

        if (File.Exists(localTmpPath))
        {
            File.Delete(localTmpPath);
        }

        // Export to a local tmp folder first because Azure blob storage doesn't support some special file access modes.
        using (var stream = File.OpenWrite(localTmpPath))
        {
            _dataExporter.Export(stream, request, progressCallback, new JobCancellationTokenWrapper(cancellationToken));
            notification.DownloadUrl = $"api/export/download/{fileName}";
        }
    }
    catch (JobAbortedException)
    {
        // Aborted externally; the finally block still publishes the final state.
    }
    catch (Exception ex)
    {
        notification.Errors.Add(ex.ExpandExceptionMessage());
    }
    finally
    {
        notification.Description = "Export finished";
        notification.Finished = DateTime.UtcNow;
        _pushNotificationManager.Upsert(notification);
    }
}
/// <summary>
/// Exports the store's usage events to a chunked CSV archive, reporting progress
/// through <paramref name="notification"/>.
/// </summary>
public void DoUsageEventsExport(string storeId, ExportPushNotification notification)
{
    var store = _storeService.GetById(storeId);
    if (store == null)
    {
        // BUGFIX (CA2201): never throw NullReferenceException explicitly — it signals a
        // runtime fault, not a missing entity; use InvalidOperationException with a real message.
        throw new InvalidOperationException($"Store with id '{storeId}' was not found.");
    }

    DoExport(
        store.Name + " Events",
        // Chunk size setting is expressed in MB; convert to bytes.
        store.Settings.GetSettingValue("Recommendations.UsageEvents.ChunkSize", 200) * 1024 * 1024,
        "events",
        () => LoadEvents(store).Select(x => new CsvUsageEvent(x)).ToArray(),
        new CsvUsageEventMap(),
        notification);
}
/// <summary>
/// Hangfire job entry point: exports the data described by <paramref name="request"/>
/// directly into blob storage under the configured default export folder and publishes
/// the absolute download URL through <paramref name="notification"/>.
/// </summary>
public async Task ExportBackgroundAsync(ExportDataRequest request, ExportPushNotification notification, IJobCancellationToken cancellationToken, PerformContext context)
{
    // Relays intermediate exporter progress to the client.
    void SendProgress(ExportProgressInfo progress)
    {
        notification.Patch(progress);
        notification.JobId = context.BackgroundJob.Id;
        _pushNotificationManager.Send(notification);
    }

    try
    {
        if (string.IsNullOrEmpty(_platformOptions.DefaultExportFolder))
        {
            throw new PlatformException($"{nameof(_platformOptions.DefaultExportFolder)} should be set.");
        }

        var fileName = string.Format(FileNameTemplate, DateTime.UtcNow);

        // Do not like provider creation here to get file extension, maybe need to pass created provider to Exporter.
        // Creating the stream inside the exporter is not good either — it is not its responsibility to decide where to write.
        var provider = _exportProviderFactory.CreateProvider(request);
        var extension = provider.ExportedFileExtension;
        if (!string.IsNullOrEmpty(extension))
        {
            fileName = Path.ChangeExtension(fileName, extension);
        }

        var url = UrlHelperExtensions.Combine(_platformOptions.DefaultExportFolder, fileName);
        using (var blobStream = _blobStorageProvider.OpenWrite(url))
        {
            _dataExporter.Export(blobStream, request, SendProgress, new JobCancellationTokenWrapper(cancellationToken));
        }

        notification.DownloadUrl = _blobUrlResolver.GetAbsoluteUrl(url);
    }
    catch (JobAbortedException)
    {
        // Aborted externally; the finally block still publishes the final state.
    }
    catch (Exception ex)
    {
        notification.Errors.Add(ex.ExpandExceptionMessage());
    }
    finally
    {
        notification.Description = "Export finished";
        notification.Finished = DateTime.UtcNow;
        await _pushNotificationManager.SendAsync(notification);
    }
}
/// <summary>
/// Enqueues the customers export as a background job and returns the push notification
/// the client can poll for progress.
/// </summary>
public async Task<ActionResult<ExportPushNotification>> RunExport([FromBody] ExportDataRequest request)
{
    var notification = new ExportPushNotification(_userNameResolver.GetCurrentUserName())
    {
        Title = "Customers export",
        Description = "Starting export task..."
    };
    await _pushNotificationManager.SendAsync(notification);

    var jobId = BackgroundJob.Enqueue<ExportJob>(job => job.ExportBackgroundAsync(request, notification, JobCancellationToken.Null, null));
    notification.JobId = jobId;

    return Ok(notification);
}
/// <summary>
/// Creates and publishes the tracking notification, hands it to <paramref name="job"/>
/// to start the actual export, and returns it to the caller.
/// </summary>
private IHttpActionResult DoExport(string notifyType, string notificationDescription, Action<ExportPushNotification> job)
{
    var notification = new ExportPushNotification(_userNameResolver.GetCurrentUserName(), notifyType)
    {
        Title = notificationDescription,
        Description = "Starting export..."
    };

    // Publish first so the client sees the notification even if the job fails to start.
    _pushNotifier.Upsert(notification);
    job(notification);

    return Ok(notification);
}
/// <summary>
/// Exports the store's catalog products to a chunked CSV archive, reporting progress
/// through <paramref name="notification"/>.
/// </summary>
public void DoCatalogExport(string storeId, ExportPushNotification notification)
{
    var store = _storeService.GetById(storeId);
    if (store == null)
    {
        // BUGFIX (CA2201): never throw NullReferenceException explicitly — it signals a
        // runtime fault, not a missing entity; use InvalidOperationException with a real message.
        throw new InvalidOperationException($"Store with id '{storeId}' was not found.");
    }

    var catalog = _catalogService.GetById(store.Catalog);
    if (catalog == null)
    {
        throw new InvalidOperationException($"Catalog with id '{store.Catalog}' was not found.");
    }

    DoExport(
        catalog.Name,
        // Chunk size setting is expressed in MB; convert to bytes.
        store.Settings.GetSettingValue("Recommendations.Catalog.ChunkSize", 200) * 1024 * 1024,
        "products",
        () => LoadProducts(store, catalog).Select(x => new CsvProduct(x)).ToArray(),
        new CsvProductMap(),
        notification);
}
/// <summary>
/// Checks the export-type-specific security policy, then enqueues the export background
/// job and returns the tracking push notification.
/// </summary>
public IHttpActionResult RunExport([FromBody] ExportDataRequest request)
{
    var policyName = request.ExportTypeName + "ExportDataPolicy";
    var handler = _exportSecurityHandlerRegistrar.GetHandler(policyName);
    if (handler?.Authorize(User.Identity.Name, request) != true)
    {
        return Unauthorized();
    }

    var notification = new ExportPushNotification(_userNameResolver.GetCurrentUserName())
    {
        Title = $"{request.ExportTypeName} export task",
        Description = "starting export...."
    };

    var jobId = BackgroundJob.Enqueue<ExportJob>(x => x.ExportBackground(request, notification, JobCancellationToken.Null, null));
    notification.JobId = jobId;

    return Ok(notification);
}
/// <summary>
/// Applies a progress snapshot to the push notification, stamps it with the running
/// Hangfire job id, and forwards it to connected clients.
/// </summary>
private void ProgressCallback(ExportProgressInfo x, ExportPushNotification pushNotification, PerformContext context)
{
    pushNotification.JobId = context.BackgroundJob.Id;
    pushNotification.Patch(x);
    _pushNotificationManager.Send(pushNotification);
}
/// <summary>
/// Exports a set of entities to a zipped CSV in blob storage, splitting the CSV into
/// numbered parts no larger than <paramref name="chunkSize"/> bytes, and reports
/// progress through <paramref name="notification"/>.
/// </summary>
/// <typeparam name="TCsvClass">CSV row type produced by <paramref name="entityFactory"/>.</typeparam>
/// <typeparam name="TClass">Type the CSV class map is declared for.</typeparam>
/// <param name="fileName">Base name used for the zip archive and its entries.</param>
/// <param name="chunkSize">Maximum size, in bytes, of a single CSV part inside the archive.</param>
/// <param name="entitiesType">Human-readable plural entity name used in progress messages.</param>
/// <param name="entityFactory">Loads and materializes all entities to export.</param>
/// <param name="entityClassMap">CsvHelper class map controlling column layout.</param>
/// <param name="notification">Push notification updated with progress, errors and the final download URL.</param>
protected void DoExport<TCsvClass, TClass>(string fileName, int chunkSize, string entitiesType, Func<ICollection<TCsvClass>> entityFactory, CsvClassMap<TClass> entityClassMap, ExportPushNotification notification)
{
    // Mirrors the current progress snapshot onto the push notification and publishes it.
    Action<ExportImportProgressInfo> progressCallback = x =>
    {
        notification.Description = x.Description;
        notification.TotalCount = x.TotalCount;
        notification.ProcessedCount = x.ProcessedCount;
        notification.Errors = x.Errors;
        _pushNotifier.Upsert(notification);
    };

    var progressInfo = new ExportImportProgressInfo { Description = string.Format("Loading {0}...", entitiesType) };
    progressCallback(progressInfo);

    var updateProgress = new Action(() =>
    {
        progressInfo.Description = string.Format("{0} of {1} {2} processed", progressInfo.ProcessedCount, progressInfo.TotalCount, entitiesType);
        progressCallback(progressInfo);
    });
    // Throttled to at most one push per second to avoid flooding the notification channel.
    var updateProgressWithThrottling = updateProgress.Throttle(TimeSpan.FromSeconds(1));

    var relativeUrl = "temp/" + fileName + ".zip";
    using (var blobStream = _blobStorageProvider.OpenWrite(relativeUrl))
    {
        try
        {
            // Because of the Recommendation API file-upload size limit, the CSV is split into
            // parts no larger than chunkSize inside a single zip archive.
            using (var archive = new ZipArchive(blobStream, ZipArchiveMode.Create, true, new UTF8Encoding(false)))
            {
                var partIndex = 1;
                using (var stream = new MemoryStream())
                {
                    using (var streamWriter = new StreamWriter(stream, new UTF8Encoding(false), 1024, true) { AutoFlush = true })
                    {
                        using (var csvWriter = new CsvWriter(streamWriter))
                        {
                            progressCallback(progressInfo);
                            var entities = entityFactory().ToArray();
                            csvWriter.Configuration.Delimiter = ",";
                            csvWriter.Configuration.RegisterClassMap(entityClassMap);
                            progressInfo.TotalCount = entities.Length;
                            for (var index = 0; index < entities.Length; index++)
                            {
                                try
                                {
                                    // Buffer size before this record is written; used to flush only
                                    // the previously completed records when the chunk overflows.
                                    var previousSize = (int)stream.Length;
                                    csvWriter.WriteRecord(entities[index]);
                                    if (stream.Length > chunkSize)
                                    {
                                        // Flush everything up to (but excluding) the record just written.
                                        // The "- 2" presumably strips the trailing CRLF of the last kept
                                        // record — TODO(review): confirm against CsvWriter's newline settings.
                                        WriteEntry(archive, fileName, ref partIndex, x => x.Write(stream.GetBuffer(), 0, previousSize - 2));
                                        stream.SetLength(0);
                                        // Step back so the overflowing record is re-written into the fresh chunk.
                                        --index;
                                    }
                                }
                                catch (Exception ex)
                                {
                                    // Record-level failures are collected and reported, not fatal.
                                    progressInfo.Errors.Add(ex.ToString());
                                    progressCallback(progressInfo);
                                }
                                progressInfo.ProcessedCount = index + 1;
                                updateProgressWithThrottling();
                            }
                        }
                    }
                    // Flush whatever remains in the buffer as the final part (forced, even if empty).
                    WriteEntry(archive, fileName, ref partIndex, stream.WriteTo, true);
                }
            }
            updateProgress();
            notification.DownloadUrl = _blobUrlResolver.GetAbsoluteUrl(relativeUrl);
        }
        catch (Exception ex)
        {
            notification.Description = "Export failed";
            notification.Errors.Add(ex.ExpandExceptionMessage());
        }
        finally
        {
            notification.Description = "Export finished";
            notification.Finished = DateTime.UtcNow;
            _pushNotifier.Upsert(notification);
        }
    }
}