/// <summary>
/// Imports photos from the given provider into the album folder structure,
/// reporting per-photo progress and honoring cancellation between photos.
/// </summary>
/// <param name="provider">Photo provider supplying the photos to import.</param>
/// <param name="progress">Progress reporter (receives path and percent complete).</param>
/// <param name="token">Optional cancellation token; may be null.</param>
public void Import(IPhotoProvider provider, IProgress<ImportProgress> progress, ICancellationToken token)
{
    this.logService.Info("Начало импорта фотографий");

    var settings = this.settingsService.GetSettings();
    this.logService.Trace("Путь до альбома: {0}", settings.AlbumeRootPath);
    this.logService.Trace("Формат пути по дате съемки: {0}", settings.PhotoTakenDateToPathFormat);
    this.logService.Trace("Формат пути без даты съемки: {0}", settings.EmptyDatePathFormat);

    // Fixed: the original enumerated provider.GetPhotos() twice (once for Count(),
    // once for the loop), which is wasteful and may yield inconsistent results if
    // the provider re-scans its source. Materialize the sequence once.
    var photos = provider.GetPhotos().ToList();
    var total = photos.Count;
    var current = 0;

    foreach (var photo in photos)
    {
        if (token != null && token.IsCancellationRequested)
        {
            this.logService.Info("Импорт фотографий прерван");
            break;
        }

        this.logService.Info("Импорт фотографии '{0}'", photo.FileName);

        var directory = this.GetDirectory(photo, settings);
        this.logService.Trace("Дата съемки: {0}", photo.DateTaken);
        this.logService.Trace("Папка импорта: '{0}'", directory);

        try
        {
            var path = this.CopyPhotoIfNeeded(directory, photo);

            // Guard against division by zero if the provider yields no photos.
            OnImportProgress(progress, path, total == 0 ? 100 : (++current * 100) / total);
        }
        catch (Exception e)
        {
            // A single failed photo must not abort the whole import run.
            this.logService.Error(e, "Ошибка импорта фотографии '{0}'", photo.FileName);
        }
    }

    this.logService.Info("Завершение импорта фотографий");
}
/// <summary>
/// Routes a batch of document ids to deletion or (re)indexing based on the change type.
/// Returns null for change types that require no action.
/// </summary>
protected virtual async Task<IndexingResult> ProcessDocumentsAsync(IndexDocumentChangeType changeType, IList<string> documentIds, BatchIndexingOptions batchOptions, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();

    switch (changeType)
    {
        case IndexDocumentChangeType.Deleted:
            return await DeleteDocumentsAsync(batchOptions.DocumentType, documentIds.ToArray());

        case IndexDocumentChangeType.Modified:
            return await IndexDocumentsAsync(batchOptions.DocumentType, documentIds, batchOptions.PrimaryDocumentBuilder, batchOptions.SecondaryDocumentBuilders, cancellationToken);

        default:
            // Other change types (e.g. unchanged) produce no result.
            return null;
    }
}
/// <summary>
/// Runs indexation for a single document configuration: validates the configuration graph,
/// reads its change feeds, and indexes changes batch-by-batch, reporting progress through
/// <paramref name="progressCallback"/>.
/// </summary>
/// <param name="configuration">Document source/builder configuration; all parts must be populated.</param>
/// <param name="options">Indexing options; supplies the document type being indexed.</param>
/// <param name="progressCallback">Optional callback invoked with progress snapshots.</param>
/// <param name="cancellationToken">Token checked once before processing begins.</param>
protected virtual async Task ProcessConfigurationAsync(IndexDocumentConfiguration configuration, IndexingOptions options, Action<IndexingProgress> progressCallback, ICancellationToken cancellationToken)
{
    // Fail fast on an incomplete configuration graph.
    if (configuration == null)
    {
        throw new ArgumentNullException(nameof(configuration));
    }
    if (string.IsNullOrEmpty(configuration.DocumentType))
    {
        throw new ArgumentNullException($"{nameof(configuration)}.{nameof(configuration.DocumentType)}");
    }
    if (configuration.DocumentSource == null)
    {
        throw new ArgumentNullException($"{nameof(configuration)}.{nameof(configuration.DocumentSource)}");
    }
    if (configuration.DocumentSource.ChangesProvider == null)
    {
        throw new ArgumentNullException($"{nameof(configuration)}.{nameof(configuration.DocumentSource)}.{nameof(configuration.DocumentSource.ChangesProvider)}");
    }
    if (configuration.DocumentSource.DocumentBuilder == null)
    {
        throw new ArgumentNullException($"{nameof(configuration)}.{nameof(configuration.DocumentSource)}.{nameof(configuration.DocumentSource.DocumentBuilder)}");
    }

    cancellationToken.ThrowIfCancellationRequested();

    var documentType = options.DocumentType;

    progressCallback?.Invoke(new IndexingProgress($"{documentType}: calculating total count", documentType));

    // Primary builder comes from the document source; secondary builders come from
    // related sources that declare their own builder.
    var batchOptions = new BatchIndexingOptions
    {
        DocumentType = options.DocumentType,
        PrimaryDocumentBuilder = configuration.DocumentSource.DocumentBuilder,
        SecondaryDocumentBuilders = configuration.RelatedSources
            ?.Where(s => s.DocumentBuilder != null)
            .Select(s => s.DocumentBuilder)
            .ToList(),
    };

    var feeds = await GetChangeFeeds(configuration, options);

    // Try to get total count to indicate progress. Some feeds don't have a total count.
    var totalCount = feeds.Any(x => x.TotalCount == null)
        ? (long?)null
        : feeds.Sum(x => x.TotalCount ?? 0);

    long processedCount = 0;

    var changes = await GetNextChangesAsync(feeds);

    while (changes.Any())
    {
        IList<string> errors = null;

        if (_backgroundWorker == null)
        {
            // Inline processing: index the changed documents now and collect any errors.
            var indexingResult = await ProcessChangesAsync(changes, batchOptions, cancellationToken);
            errors = GetIndexingErrors(indexingResult);
        }
        else
        {
            // We're executing a job to index all documents or the changes since a specific time.
            // Priority for this indexation work should be quite low.
            var documentIds = changes
                .Select(x => x.DocumentId)
                .Distinct()
                .ToArray();

            _backgroundWorker.IndexDocuments(configuration.DocumentType, documentIds, IndexingPriority.Background);
        }

        processedCount += changes.Count;

        var description = totalCount != null
            ? $"{documentType}: {processedCount} of {totalCount} have been indexed"
            : $"{documentType}: {processedCount} have been indexed";

        progressCallback?.Invoke(new IndexingProgress(description, documentType, totalCount, processedCount, errors));

        changes = await GetNextChangesAsync(feeds);
    }

    progressCallback?.Invoke(new IndexingProgress($"{documentType}: indexation finished", documentType, totalCount ?? processedCount, processedCount));
}
/// <summary>
/// Delegates notification data import to the module's export/import service resolved from DI.
/// </summary>
public async Task ImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    var importer = _appBuilder.ApplicationServices.GetRequiredService<NotificationsExportImport>();
    await importer.DoImportAsync(inputStream, progressCallback, cancellationToken);
}
/// <summary>
/// Imports marketing data (promotions, dynamic content, coupons, usages) from a JSON stream,
/// saving each entity type in pages of <c>_batchSize</c> and reporting progress per page.
/// </summary>
public async Task DoImportAsync(Stream inputStream, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();

    var progressInfo = new ExportImportProgressInfo();

    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType != JsonToken.PropertyName)
            {
                continue;
            }

            // Evaluate the property name once instead of re-converting it in every branch.
            switch (reader.Value.ToString())
            {
                case "Promotions":
                    await reader.DeserializeJsonArrayWithPagingAsync<Promotion>(_jsonSerializer, _batchSize,
                        items => _promotionService.SavePromotionsAsync(items.ToArray()),
                        processedCount =>
                        {
                            progressInfo.Description = $"{ processedCount } promotions have been imported";
                            progressCallback(progressInfo);
                        }, cancellationToken);
                    break;

                case "DynamicContentFolders":
                    await reader.DeserializeJsonArrayWithPagingAsync<DynamicContentFolder>(_jsonSerializer, _batchSize,
                        items => _dynamicContentService.SaveFoldersAsync(items.ToArray()),
                        processedCount =>
                        {
                            // Fixed copy-paste error: the original message said "dynamic content items".
                            progressInfo.Description = $"{ processedCount } dynamic content folders have been imported";
                            progressCallback(progressInfo);
                        }, cancellationToken);
                    break;

                case "DynamicContentItems":
                    await reader.DeserializeJsonArrayWithPagingAsync<DynamicContentItem>(_jsonSerializer, _batchSize,
                        items => _dynamicContentService.SaveContentItemsAsync(items.ToArray()),
                        processedCount =>
                        {
                            progressInfo.Description = $"{ processedCount } dynamic content items have been imported";
                            progressCallback(progressInfo);
                        }, cancellationToken);
                    break;

                case "DynamicContentPlaces":
                    await reader.DeserializeJsonArrayWithPagingAsync<DynamicContentPlace>(_jsonSerializer, _batchSize,
                        items => _dynamicContentService.SavePlacesAsync(items.ToArray()),
                        processedCount =>
                        {
                            progressInfo.Description = $"{ processedCount } dynamic content places have been imported";
                            progressCallback(progressInfo);
                        }, cancellationToken);
                    break;

                case "DynamicContentPublications":
                    await reader.DeserializeJsonArrayWithPagingAsync<DynamicContentPublication>(_jsonSerializer, _batchSize,
                        items => _dynamicContentService.SavePublicationsAsync(items.ToArray()),
                        processedCount =>
                        {
                            progressInfo.Description = $"{ processedCount } dynamic content publications have been imported";
                            progressCallback(progressInfo);
                        }, cancellationToken);
                    break;

                case "Coupons":
                    await reader.DeserializeJsonArrayWithPagingAsync<Coupon>(_jsonSerializer, _batchSize,
                        items => _couponService.SaveCouponsAsync(items.ToArray()),
                        processedCount =>
                        {
                            progressInfo.Description = $"{ processedCount } coupons have been imported";
                            progressCallback(progressInfo);
                        }, cancellationToken);
                    break;

                case "Usages":
                    await reader.DeserializeJsonArrayWithPagingAsync<PromotionUsage>(_jsonSerializer, _batchSize,
                        items => _promotionUsageService.SaveUsagesAsync(items.ToArray()),
                        processedCount =>
                        {
                            progressInfo.Description = $"{ processedCount } usages have been imported";
                            progressCallback(progressInfo);
                        }, cancellationToken);
                    break;
            }
        }
    }
}
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="database">Client database to use for query.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementIDs">Measurement IDs to query - or <c>null</c> for all available points.</param>
/// <param name="resolution">Resolution for data query.</param>
/// <param name="seriesLimit">Maximum number of points per series.</param>
/// <param name="forceLimit">Flag that determines if series limit should be strictly enforced.</param>
/// <param name="cancellationToken">Cancellation token for query.</param>
/// <returns>Enumeration of <see cref="TrendValue"/> instances read for time range.</returns>
public static IEnumerable<TrendValue> GetHistorianData(ClientDatabaseBase<HistorianKey, HistorianValue> database, DateTime startTime, DateTime stopTime, ulong[] measurementIDs, Resolution resolution, int seriesLimit, bool forceLimit, ICancellationToken cancellationToken = null)
{
    if ((object)cancellationToken == null)
    {
        cancellationToken = new CancellationToken();
    }

    if ((object)database == null)
    {
        yield break;
    }

    // Setting series limit to zero requests full resolution data, which overrides provided parameter
    if (seriesLimit < 1)
    {
        resolution = Resolution.Full;
    }

    TimeSpan resolutionInterval = resolution.GetInterval();
    SeekFilterBase<HistorianKey> timeFilter;
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    // Set data scan resolution
    if (resolution == Resolution.Full)
    {
        timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
    }
    else
    {
        // Baseline the query window to the time unit implied by the requested resolution.
        BaselineTimeInterval interval = BaselineTimeInterval.Second;

        if (resolutionInterval.Ticks < Ticks.PerMinute)
        {
            interval = BaselineTimeInterval.Second;
        }
        else if (resolutionInterval.Ticks < Ticks.PerHour)
        {
            interval = BaselineTimeInterval.Minute;
        }
        else if (resolutionInterval.Ticks == Ticks.PerHour)
        {
            // NOTE(review): only exactly one hour maps to the hour baseline; intervals
            // above one hour fall through to the Second default — confirm intended.
            interval = BaselineTimeInterval.Hour;
        }

        startTime = startTime.BaselinedTimestamp(interval);
        stopTime = stopTime.BaselinedTimestamp(interval);

        timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond));
    }

    // Metadata for the target historian instance is required to estimate point counts.
    Dictionary<ulong, DataRow> metadata = null;
    LocalOutputAdapter historianAdapter;

    if (LocalOutputAdapter.Instances.TryGetValue(database.Info?.DatabaseName ?? DefaultInstanceName, out historianAdapter))
    {
        metadata = historianAdapter?.Measurements;
    }

    if ((object)metadata == null)
    {
        yield break;
    }

    // Setup point ID selections
    if ((object)measurementIDs != null)
    {
        pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs);
    }
    else
    {
        measurementIDs = metadata.Keys.ToArray();
    }

    // Start stream reader for the provided time window and selected points
    Dictionary<ulong, long> pointCounts = new Dictionary<ulong, long>(measurementIDs.Length);
    Dictionary<ulong, long> intervals = new Dictionary<ulong, long>(measurementIDs.Length);
    Dictionary<ulong, ulong> lastTimes = new Dictionary<ulong, ulong>(measurementIDs.Length);
    double range = (stopTime - startTime).TotalSeconds;
    ulong pointID, timestamp, resolutionSpan = (ulong)resolutionInterval.Ticks, baseTicks = (ulong)UnixTimeTag.BaseTicks.Value;
    long pointCount;
    DataRow row;

    if (resolutionSpan <= 1UL)
    {
        resolutionSpan = Ticks.PerSecond;
    }

    if (seriesLimit < 1)
    {
        seriesLimit = 1;
    }

    // Estimate total measurement counts per point so decimation intervals for each series can be calculated
    foreach (ulong measurementID in measurementIDs)
    {
        if (resolution == Resolution.Full)
        {
            pointCounts[measurementID] = metadata.TryGetValue(measurementID, out row) ? (long)(int.Parse(row["FramesPerSecond"].ToString()) * range) : 2;
        }
        else
        {
            pointCounts[measurementID] = (long)(range / resolutionInterval.TotalSeconds.NotZero(1.0D));
        }
    }

    foreach (ulong measurementID in pointCounts.Keys)
    {
        intervals[measurementID] = (pointCounts[measurementID] / seriesLimit).NotZero(1L);
    }

    lock (database)
    {
        using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            while (stream.Read(key, value) && !cancellationToken.IsCancelled)
            {
                pointID = key.PointID;
                timestamp = key.Timestamp;
                pointCount = pointCounts[pointID];

                // Publish a value when it falls on the decimation interval, or when the
                // point's resolution window has elapsed and the limit is not enforced.
                if (pointCount++ % intervals[pointID] == 0 || (!forceLimit && timestamp - lastTimes.GetOrAdd(pointID, 0UL) > resolutionSpan))
                {
                    // Fixed: the original source had the statement terminator misplaced
                    // outside the if-block ("} ;"), leaving this yield return unterminated.
                    yield return new TrendValue
                    {
                        ID = (long)pointID,
                        Timestamp = (timestamp - baseTicks) / (double)Ticks.PerMillisecond,
                        Value = value.AsSingle
                    };
                }

                pointCounts[pointID] = pointCount;
                lastTimes[pointID] = timestamp;
            }
        }
    }
}
/// <summary>
/// Loads all members of <paramref name="module"/> using a fresh <c>ModuleLoader</c> instance.
/// </summary>
public static void LoadAll(ModuleDef module, ICancellationToken cancellationToken)
{
    var loader = new ModuleLoader(module, cancellationToken);
    loader.Load();
}
/// <summary>
/// Delegates subscription data export to the module's export/import service resolved from DI.
/// </summary>
public async Task ExportAsync(Stream outStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    var exporter = _applicationBuilder.ApplicationServices.GetRequiredService<SubscriptionExportImport>();
    await exporter.DoExportAsync(outStream, progressCallback, cancellationToken);
}
/// <summary>
/// Sends orders supplied by the change feed to AvaTax in batches, reporting progress and
/// collecting per-order errors instead of aborting the whole synchronization.
/// </summary>
public async Task SynchronizeOrdersAsync(IIndexDocumentChangeFeed ordersFeed, Action<AvaTaxOrdersSynchronizationProgress> progressCallback, ICancellationToken cancellationToken)
{
    // TODO: how to find order count when ordersFeed.TotalsCount is null?
    var totalCount = (long)ordersFeed.TotalCount;

    var progressInfo = new AvaTaxOrdersSynchronizationProgress
    {
        Message = "Reading orders...",
        TotalCount = totalCount,
        ProcessedCount = 0
    };
    progressCallback(progressInfo);

    cancellationToken?.ThrowIfCancellationRequested();

    for (long i = 0; i < totalCount; i += BatchSize)
    {
        var searchResult = await ordersFeed.GetNextBatch();
        var orderIds = searchResult.Select(x => x.DocumentId).ToArray();
        var orders = await _orderService.GetByIdsAsync(orderIds);

        foreach (var order in orders)
        {
            var avaTaxSettings = await GetAvataxSettingsForOrder(order);

            if (avaTaxSettings != null)
            {
                // Fixed: this async call was previously fired-and-forgotten, so tax types
                // could still be unresolved when the order was sent to AvaTax.
                await _orderTaxTypeResolver.ResolveTaxTypeForOrderAsync(order);

                var avaTaxClient = _avaTaxClientFactory(avaTaxSettings);

                try
                {
                    await SendOrderToAvaTax(order, avaTaxSettings.CompanyCode, avaTaxSettings.SourceAddress, avaTaxClient);
                }
                catch (AvaTaxError e)
                {
                    // Accumulate AvaTax-reported errors per order; keep processing the batch.
                    var errorDetails = e.error.error;
                    var joinedMessages = string.Join(Environment.NewLine, errorDetails.details.Select(x => $"{x.severity}: {x.message} {x.description}"));
                    var errorMessage = $"Order #{order.Number}: {errorDetails.message}{Environment.NewLine}{joinedMessages}";
                    progressInfo.Errors.Add(errorMessage);
                }
            }
            else
            {
                var errorMessage = $"Order #{order.Number} was not sent to Avalara, because the order store does not use AvaTax as tax provider.";
                progressInfo.Errors.Add(errorMessage);
            }

            cancellationToken?.ThrowIfCancellationRequested();
        }

        var processedCount = Math.Min(i + orderIds.Length, totalCount);
        progressInfo.ProcessedCount = processedCount;
        progressInfo.Message = $"Processed {processedCount} of {totalCount} orders";
        progressCallback(progressInfo);
    }

    progressInfo.Message = "Orders synchronization completed.";
    progressCallback(progressInfo);
}
/// <summary>
/// Recursively collects all blob entries (files and folders) under the given folder path.
/// </summary>
protected virtual async Task<ICollection<BlobEntry>> ReadBlobFolderAsync(string folderPath, ICancellationToken token)
{
    token?.ThrowIfCancellationRequested();

    var entries = new List<BlobEntry>();

    var searchResults = await _storageProvider.SearchAsync(folderPath, null);
    entries.AddRange(searchResults.Results);

    // Descend into each sub-folder and merge its contents.
    foreach (var blobEntry in searchResults.Results)
    {
        if (blobEntry.Type != "folder")
        {
            continue;
        }

        var nested = await ReadBlobFolderAsync(blobEntry.RelativeUrl, token);
        entries.AddRange(nested);
    }

    return entries;
}
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="server">The server to use for the query.</param>
/// <param name="instanceName">Name of the archive to be queried.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementIDs">Measurement IDs to query - or <c>null</c> for all available points.</param>
/// <param name="resolution">Resolution for data query.</param>
/// <param name="seriesLimit">Maximum number of points per series.</param>
/// <param name="forceLimit">Flag that determines if series limit should be strictly enforced.</param>
/// <param name="cancellationToken">Cancellation token for query.</param>
/// <returns>Enumeration of <see cref="TrendValue"/> instances read for time range.</returns>
public static IEnumerable<TrendValue> GetHistorianData(SnapServer server, string instanceName, DateTime startTime, DateTime stopTime, ulong[] measurementIDs, Resolution resolution, int seriesLimit, bool forceLimit, ICancellationToken cancellationToken = null)
{
    if (cancellationToken == null)
    {
        cancellationToken = new CancellationToken();
    }

    if (server == null)
    {
        yield break;
    }

    // Setting series limit to zero requests full resolution data, which overrides provided parameter
    if (seriesLimit < 1)
    {
        resolution = Resolution.Full;
        forceLimit = false;
    }

    TimeSpan resolutionInterval = resolution.GetInterval();
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();
    bool subFullResolution = false;

    // Set data scan resolution: when below full resolution, baseline the query window to
    // the time unit implied by the requested interval and track min/max extremes per point.
    if (resolution != Resolution.Full)
    {
        subFullResolution = true;

        BaselineTimeInterval interval = BaselineTimeInterval.Second;

        if (resolutionInterval.Ticks < Ticks.PerMinute)
        {
            interval = BaselineTimeInterval.Second;
        }
        else if (resolutionInterval.Ticks < Ticks.PerHour)
        {
            interval = BaselineTimeInterval.Minute;
        }
        else if (resolutionInterval.Ticks == Ticks.PerHour)
        {
            // NOTE(review): only exactly one hour maps to the hour baseline; larger
            // intervals fall through to the Second default — confirm intended.
            interval = BaselineTimeInterval.Hour;
        }

        startTime = startTime.BaselinedTimestamp(interval);
        stopTime = stopTime.BaselinedTimestamp(interval);
    }

    SeekFilterBase<HistorianKey> timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);

    Dictionary<ulong, DataRow> metadata = null;

    using (SnapClient connection = SnapClient.Connect(server))
    using (ClientDatabaseBase<HistorianKey, HistorianValue> database = connection.GetDatabase<HistorianKey, HistorianValue>(instanceName))
    {
        if (database == null)
        {
            yield break;
        }

        // Metadata for the target historian instance is required to enumerate points.
        if (LocalOutputAdapter.Instances.TryGetValue(database.Info?.DatabaseName ?? DefaultInstanceName, out LocalOutputAdapter historianAdapter))
        {
            metadata = historianAdapter?.Measurements;
        }

        if (metadata == null)
        {
            yield break;
        }

        // Setup point ID selections
        if (measurementIDs != null)
        {
            pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs);
        }
        else
        {
            measurementIDs = metadata.Keys.ToArray();
        }

        // Per-point state: published counts, last published timestamps, and (for decimated
        // queries) the running min/max extremes within the current resolution window.
        Dictionary<ulong, long> pointCounts = new Dictionary<ulong, long>(measurementIDs.Length);
        Dictionary<ulong, ulong> lastTimes = new Dictionary<ulong, ulong>(measurementIDs.Length);
        Dictionary<ulong, Tuple<float, float>> extremes = new Dictionary<ulong, Tuple<float, float>>(measurementIDs.Length);
        ulong pointID, timestamp, resolutionSpan = (ulong)resolutionInterval.Ticks, baseTicks = (ulong)UnixTimeTag.BaseTicks.Value;
        long pointCount;
        float pointValue, min = 0.0F, max = 0.0F;

        foreach (ulong measurementID in measurementIDs)
        {
            pointCounts[measurementID] = 0L;
        }

        // Start stream reader for the provided time window and selected points
        using (TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter))
        {
            while (stream.Read(key, value) && !cancellationToken.IsCancelled)
            {
                pointID = key.PointID;
                timestamp = key.Timestamp;
                pointCount = pointCounts[pointID];
                pointValue = value.AsSingle;

                if (subFullResolution)
                {
                    // Fold the current sample into the window's extremes, then publish
                    // whichever extreme has the larger magnitude when both are set.
                    Tuple<float, float> stats = extremes.GetOrAdd(pointID, _ => new Tuple<float, float>(float.MaxValue, float.MinValue));

                    min = stats.Item1;
                    max = stats.Item2;

                    if (pointValue < min)
                    {
                        min = pointValue;
                    }

                    if (pointValue > max)
                    {
                        max = pointValue;
                    }

                    if (min != float.MaxValue && max != float.MinValue)
                    {
                        pointValue = Math.Abs(max) > Math.Abs(min) ? max : min;
                    }
                    else if (min != float.MaxValue)
                    {
                        pointValue = min;
                    }
                    else if (max != float.MinValue)
                    {
                        pointValue = max;
                    }
                }

                if (timestamp - lastTimes.GetOrAdd(pointID, 0UL) > resolutionSpan)
                {
                    pointCount++;

                    // When the limit is strictly enforced, stop the whole scan once any
                    // series exceeds it.
                    if (forceLimit && pointCount > seriesLimit)
                    {
                        break;
                    }

                    yield return (new TrendValue
                    {
                        ID = (long)pointID,
                        Timestamp = (timestamp - baseTicks) / (double)Ticks.PerMillisecond,
                        Value = pointValue
                    });

                    lastTimes[pointID] = timestamp;

                    // Reset extremes at each point publication
                    if (subFullResolution)
                    {
                        extremes[pointID] = new Tuple<float, float>(float.MaxValue, float.MinValue);
                    }
                }
                else if (subFullResolution)
                {
                    // Track extremes over interval
                    extremes[pointID] = new Tuple<float, float>(min, max);
                }

                pointCounts[pointID] = pointCount;
            }
        }
    }
}
/// <summary>
/// Imports members from a CSV file: validates the file up front, streams it in pages,
/// writes a per-row error report, and reports progress throughout.
/// </summary>
/// <exception cref="InvalidDataException">Thrown when the CSV file fails validation.</exception>
public virtual async Task ImportAsync(ImportDataRequest request, Action<ImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    ValidateParameters(request, progressCallback, cancellationToken);

    var errorsContext = new ImportErrorsContext();

    var csvPriceDataValidationResult = await _dataValidator.ValidateAsync<TCsvMember>(request.FilePath);
    if (csvPriceDataValidationResult.Errors.Any())
    {
        // Fixed: the exception previously carried no message, making import failures opaque.
        throw new InvalidDataException("The CSV file did not pass validation and cannot be imported.");
    }

    var configuration = new ImportConfiguration();
    var reportFilePath = GetReportFilePath(request.FilePath);

    await using var importReporter = await _importReporterFactory.CreateAsync(reportFilePath, configuration.Delimiter);

    cancellationToken.ThrowIfCancellationRequested();

    var importProgress = new ImportProgressInfo { Description = "Import has started" };

    using var dataSource = await _dataSourceFactory.CreateAsync<TCsvMember, TMember>(request.FilePath, ModuleConstants.Settings.PageSize, configuration);

    var headerRaw = dataSource.GetHeaderRaw();
    if (!headerRaw.IsNullOrEmpty())
    {
        await importReporter.WriteHeaderAsync(headerRaw);
    }

    importProgress.TotalCount = dataSource.GetTotalCount();
    progressCallback(importProgress);

    const string importDescription = "{0} out of {1} have been imported.";

    SetupErrorHandlers(progressCallback, configuration, errorsContext, importProgress, importReporter);

    try
    {
        importProgress.Description = "Fetching...";
        progressCallback(importProgress);

        while (await dataSource.FetchAsync())
        {
            await ProcessChunkAsync(request, progressCallback, dataSource, errorsContext, importProgress, importReporter);

            // Skip the intermediate message when the final chunk completes the import;
            // the finally block reports the completion message instead.
            if (importProgress.ProcessedCount != importProgress.TotalCount)
            {
                importProgress.Description = string.Format(importDescription, importProgress.ProcessedCount, importProgress.TotalCount);
                progressCallback(importProgress);
            }
        }
    }
    catch (Exception e)
    {
        HandleError(progressCallback, importProgress, e.Message);
    }
    finally
    {
        var completedMessage = importProgress.ErrorCount > 0 ? "Import completed with errors" : "Import completed";
        importProgress.Description = $"{completedMessage}: {string.Format(importDescription, importProgress.ProcessedCount, importProgress.TotalCount)}";

        if (importReporter.ReportIsNotEmpty)
        {
            importProgress.ReportUrl = _blobUrlResolver.GetAbsoluteUrl(reportFilePath);
        }

        progressCallback(importProgress);
    }
}
/// <summary>
/// Guards the public import entry point against null arguments.
/// </summary>
private static void ValidateParameters(ImportDataRequest request, Action<ImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    if (request is null)
    {
        throw new ArgumentNullException(nameof(request));
    }

    if (progressCallback is null)
    {
        throw new ArgumentNullException(nameof(progressCallback));
    }

    if (cancellationToken is null)
    {
        throw new ArgumentNullException(nameof(cancellationToken));
    }
}
/// <summary>
/// Imports dynamic association records from a JSON stream, saving them in pages of
/// <c>_batchSize</c> and reporting progress after each page.
/// </summary>
public async Task DoImportAsync(Stream inputStream, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();

    var progressInfo = new ExportImportProgressInfo
    {
        Description = "Importing dynamic associations…",
    };
    progressCallback(progressInfo);

    using (var streamReader = new StreamReader(inputStream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            // Only the "DynamicAssociations" array is relevant; skip everything else.
            if (reader.TokenType != JsonToken.PropertyName || reader.Value.ToString() != "DynamicAssociations")
            {
                continue;
            }

            await reader.DeserializeJsonArrayWithPagingAsync<Association>(_serializer, _batchSize,
                async items => { await _associationService.SaveChangesAsync(items.ToArray()); },
                processedCount =>
                {
                    progressInfo.Description = $"{processedCount} Tagged items have been imported";
                    progressCallback(progressInfo);
                }, cancellationToken);
        }
    }
}
/// <summary>
/// Exports all dynamic associations as a JSON object, paging through search results
/// and reporting progress after each page.
/// </summary>
public async Task DoExportAsync(Stream outStream, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();

    var progressInfo = new ExportImportProgressInfo { Description = "Loading data..." };
    progressCallback(progressInfo);

    using (var streamWriter = new StreamWriter(outStream, Encoding.UTF8))
    using (var jsonWriter = new JsonTextWriter(streamWriter))
    {
        await jsonWriter.WriteStartObjectAsync();

        await jsonWriter.WritePropertyNameAsync("DynamicAssociations");
        await jsonWriter.SerializeJsonArrayWithPagingAsync(_serializer, _batchSize,
            async (skip, take) => (GenericSearchResult<Association>)await _associationSearchService.SearchAssociationsAsync(new AssociationSearchCriteria { Skip = skip, Take = take }),
            (processedCount, totalCount) =>
            {
                progressInfo.Description = $"{ processedCount } of { totalCount } dynamic associations have been exported";
                progressCallback(progressInfo);
            }, cancellationToken);

        await jsonWriter.WriteEndObjectAsync();
        await jsonWriter.FlushAsync();
    }
}
private void QueueStatusUpdate(StatusUpdate update) { const int HighPriority = 2; Action processAction; m_statusUpdateQueue.Add(update); if (m_logStatusUpdates) m_statusLog.WriteTimestampedLine(update.Message); if (m_queueCancellationToken == null) { processAction = () => m_statusUpdateThread.Push(HighPriority, ProcessStatusUpdates); m_queueCancellationToken = processAction.DelayAndExecute(250); } }
/// <summary>
/// Builds index documents for the given ids using the primary builder (or bare documents
/// when no builder is provided), merges in secondary-builder fields, and stamps each
/// document with the indexation-date system field.
/// </summary>
protected virtual async Task<IList<IndexDocument>> GetDocumentsAsync(IList<string> documentIds, IIndexDocumentBuilder primaryDocumentBuilder, IEnumerable<IIndexDocumentBuilder> additionalDocumentBuilders, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();

    var primaryDocuments = primaryDocumentBuilder == null
        ? documentIds.Select(x => new IndexDocument(x)).ToList()
        : (await primaryDocumentBuilder.GetDocumentsAsync(documentIds))?.Where(x => x != null).ToList();

    if (primaryDocuments?.Any() == true)
    {
        if (additionalDocumentBuilders != null)
        {
            var primaryDocumentIds = primaryDocuments.Select(d => d.Id).ToArray();
            var secondaryDocuments = await GetSecondaryDocumentsAsync(additionalDocumentBuilders, primaryDocumentIds, cancellationToken);

            MergeDocuments(primaryDocuments, secondaryDocuments);
        }

        // Add system fields
        foreach (var document in primaryDocuments)
        {
            document.Add(new IndexDocumentField(KnownDocumentFields.IndexationDate, DateTime.UtcNow) { IsRetrievable = true, IsFilterable = true });
        }
    }

    return primaryDocuments;
}
/// <summary>
/// Delegates pricing data import to the module's export/import service resolved from DI.
/// </summary>
public async Task ImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    await _applicationBuilder.ApplicationServices
        .GetRequiredService<PricingExportImport>()
        .DoImportAsync(inputStream, progressCallback, cancellationToken);
}
/// <summary>
/// Wraps a bitmap source so reads can be split into sub-rectangles (via
/// <paramref name="sourceRectSplitter"/>) and cancelled mid-operation through
/// <paramref name="cancelToken"/>.
/// </summary>
public CancellableBitmapSource(IBitmapSource<TPixel> source, Func<RectInt32, IEnumerable<RectInt32>> sourceRectSplitter, Action<RectInt32> rectCompletedCallback, ICancellationToken cancelToken)
{
    Validate.Begin()
        .IsNotNull<IBitmapSource<TPixel>>(source, "source")
        .IsNotNull<Func<RectInt32, IEnumerable<RectInt32>>>(sourceRectSplitter, "sourceRectSplitter")
        .IsNotNull<ICancellationToken>(cancelToken, "cancelToken")
        .Check();

    this.source = source.CreateRef<TPixel>();
    this.sourceSize = this.source.Size;
    this.sourceRectSplitter = sourceRectSplitter;
    this.rectCompletedCallback = rectCompletedCallback;
    this.cancelToken = cancelToken;

    // BytesPerPixel is exposed as an instance member, so read it off a default pixel value.
    TPixel defaultPixel = default(TPixel);
    this.bytesPerPixel = defaultPixel.BytesPerPixel;
}
/// <summary>
/// Imports platform data and module data from an exported zip archive according to the
/// manifest, suppressing domain events for the duration of the import.
/// </summary>
public async Task ImportAsync(Stream stream, PlatformExportManifest manifest, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    if (manifest == null)
    {
        throw new ArgumentNullException(nameof(manifest));
    }

    progressCallback(new ExportImportProgressInfo { Description = "Starting platform import..." });

    using (var zipArchive = new ZipArchive(stream, ZipArchiveMode.Read, true))
    using (EventSuppressor.SupressEvents())
    {
        //Import selected platform entries
        await ImportPlatformEntriesInternalAsync(zipArchive, manifest, progressCallback, cancellationToken);

        //Import selected modules
        await ImportModulesInternalAsync(zipArchive, manifest, progressCallback, cancellationToken);
    }
}
/// <summary>
/// Delegates order data import to the module's export/import service resolved from DI.
/// </summary>
public Task ImportAsync(Stream inputStream, ExportImportOptions options, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    var orderImporter = _appBuilder.ApplicationServices.GetRequiredService<OrderExportImport>();
    return orderImporter.ImportAsync(inputStream, options, progressCallback, cancellationToken);
}
/// <summary>
/// Imports platform-level entries (roles, users, settings, dynamic properties and their
/// dictionary items) from the platform zip entry, honoring the manifest's
/// security/settings flags.
/// </summary>
private async Task ImportPlatformEntriesInternalAsync(ZipArchive zipArchive, PlatformExportManifest manifest, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    var progressInfo = new ExportImportProgressInfo();
    var jsonSerializer = GetJsonSerializer();
    var batchSize = 20;

    var platformZipEntries = zipArchive.GetEntry(PlatformZipEntryName);
    if (platformZipEntries == null)
    {
        // Nothing to do when the archive has no platform entry.
        return;
    }

    using (var stream = platformZipEntries.Open())
    using (var streamReader = new StreamReader(stream))
    using (var reader = new JsonTextReader(streamReader))
    {
        while (reader.Read())
        {
            if (reader.TokenType != JsonToken.PropertyName)
            {
                continue;
            }

            // Convert the property name once per token instead of once per branch.
            var propertyName = reader.Value.ToString();

            if (manifest.HandleSecurity && propertyName.EqualsInvariant("Roles"))
            {
                await reader.DeserializeJsonArrayWithPagingAsync<Role>(jsonSerializer, batchSize, async items =>
                {
                    // Update existing roles; create the rest.
                    foreach (var role in items)
                    {
                        if (await _roleManager.RoleExistsAsync(role.Name))
                        {
                            await _roleManager.UpdateAsync(role);
                        }
                        else
                        {
                            await _roleManager.CreateAsync(role);
                        }
                    }
                }, processedCount =>
                {
                    progressInfo.Description = $"{ processedCount } roles have been imported";
                    progressCallback(progressInfo);
                }, cancellationToken);
            }
            else if (manifest.HandleSecurity && propertyName.EqualsInvariant("Users"))
            {
                await reader.DeserializeJsonArrayWithPagingAsync<ApplicationUser>(jsonSerializer, batchSize, async items =>
                {
                    // Update existing users; create the rest.
                    foreach (var user in items)
                    {
                        var userExist = await _userManager.FindByIdAsync(user.Id);
                        if (userExist != null)
                        {
                            await _userManager.UpdateAsync(user);
                        }
                        else
                        {
                            await _userManager.CreateAsync(user);
                        }
                    }
                }, processedCount =>
                {
                    // Fixed copy-paste error: progress message previously said "roles".
                    progressInfo.Description = $"{ processedCount } users have been imported";
                    progressCallback(progressInfo);
                }, cancellationToken);
            }
            else if (manifest.HandleSettings && propertyName == "Settings")
            {
                await reader.DeserializeJsonArrayWithPagingAsync<ObjectSettingEntry>(jsonSerializer, int.MaxValue, async items =>
                {
                    // Settings are saved per module they belong to.
                    var arrayItems = items.ToArray();
                    foreach (var module in manifest.Modules)
                    {
                        await _settingsManager.SaveObjectSettingsAsync(arrayItems.Where(x => x.ModuleId == module.Id).ToArray());
                    }
                }, processedCount =>
                {
                    // Fixed copy-paste error: progress message previously said "coupons".
                    progressInfo.Description = $"{ processedCount } settings have been imported";
                    progressCallback(progressInfo);
                }, cancellationToken);
            }
            else if (manifest.HandleSettings && propertyName == "DynamicProperties")
            {
                await reader.DeserializeJsonArrayWithPagingAsync<DynamicProperty>(jsonSerializer, batchSize,
                    items => _dynamicPropertyService.SaveDynamicPropertiesAsync(items.ToArray()),
                    processedCount =>
                    {
                        // Fixed copy-paste error: progress message previously said "coupons".
                        progressInfo.Description = $"{ processedCount } dynamic properties have been imported";
                        progressCallback(progressInfo);
                    }, cancellationToken);
            }
            else if (manifest.HandleSettings && propertyName == "DynamicPropertyDictionaryItems")
            {
                await reader.DeserializeJsonArrayWithPagingAsync<DynamicPropertyDictionaryItem>(jsonSerializer, batchSize,
                    items => _dynamicPropertyService.SaveDictionaryItemsAsync(items.ToArray()),
                    processedCount =>
                    {
                        // Fixed copy-paste error: progress message previously said "coupons".
                        progressInfo.Description = $"{ processedCount } dynamic property dictionary items have been imported";
                        progressCallback(progressInfo);
                    }, cancellationToken);
            }
        }
    }
}
private void MakeConfigurationChanges(Action configurationChangeAction) { m_configurationChangedToken?.Cancel(); configurationChangeAction.TryExecute(OnProcessException); m_configurationChangedToken = new Action(OnConfigurationChanged).DelayAndExecute(ConfigurationChangedTimeout); }
/// <summary>
/// Exports platform-level entries (security roles/users, settings, dynamic properties and
/// their dictionary items) into the "PlatformEntries" zip entry as a single streamed JSON object.
/// </summary>
/// <param name="zipArchive">Target archive for the export.</param>
/// <param name="manifest">Manifest describing which sections to export.</param>
/// <param name="progressCallback">Receives progress descriptions per section.</param>
/// <param name="cancellationToken">Checked before each section is written.</param>
private async Task ExportPlatformEntriesInternalAsync(ZipArchive zipArchive, PlatformExportManifest manifest, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    var progressInfo = new ExportImportProgressInfo();
    var serializer = GetJsonSerializer();

    //Create part for platform entries
    var platformEntiriesPart = zipArchive.CreateEntry(PlatformZipEntryName, CompressionLevel.Optimal);
    using (var partStream = platformEntiriesPart.Open())
    {
        using (var sw = new StreamWriter(partStream, Encoding.UTF8))
        using (var writer = new JsonTextWriter(sw))
        {
            await writer.WriteStartObjectAsync();

            if (manifest.HandleSecurity)
            {
                progressInfo.Description = "Roles exporting...";
                progressCallback(progressInfo);
                cancellationToken.ThrowIfCancellationRequested();

                await writer.WritePropertyNameAsync("Roles");
                await writer.WriteStartArrayAsync();
                var roles = _roleManager.Roles.ToList();
                // NOTE(review): roles are only serialized when role claims are supported,
                // so the "Roles" array is empty otherwise — confirm this is intended.
                if (_roleManager.SupportsRoleClaims)
                {
                    var permissions = _permissionsProvider.GetAllPermissions().ToArray();
                    foreach (var role in roles)
                    {
                        // Resolve each role's claims into the registered permission objects.
                        role.Permissions = (await _roleManager.GetClaimsAsync(role)).Join(permissions, c => c.Value, p => p.Name, (c, p) => p).ToArray();
                        serializer.Serialize(writer, role);
                    }
                    // Fixed: was a synchronous Flush() in an otherwise fully async method.
                    await writer.FlushAsync();
                    progressInfo.Description = $"{ roles.Count } roles exported";
                    progressCallback(progressInfo);
                }
                await writer.WriteEndArrayAsync();

                cancellationToken.ThrowIfCancellationRequested();

                await writer.WritePropertyNameAsync("Users");
                await writer.WriteStartArrayAsync();
                var usersResult = _userManager.Users.ToArray();
                progressInfo.Description = $"Security: {usersResult.Length} users exporting...";
                progressCallback(progressInfo);
                var userExported = 0;
                foreach (var user in usersResult)
                {
                    // Re-fetch through the user manager so the serialized user carries the
                    // fully-populated (extended) shape rather than the bare query projection.
                    var userExt = await _userManager.FindByIdAsync(user.Id);
                    if (userExt != null)
                    {
                        serializer.Serialize(writer, userExt);
                        userExported++;
                    }
                }
                await writer.FlushAsync();
                progressInfo.Description = $"{ userExported } of { usersResult.Length } users exported";
                progressCallback(progressInfo);
                await writer.WriteEndArrayAsync();
            }

            if (manifest.HandleSettings)
            {
                cancellationToken.ThrowIfCancellationRequested();

                await writer.WritePropertyNameAsync("Settings");
                await writer.WriteStartArrayAsync();
                progressInfo.Description = "Settings: selected modules settings exporting...";
                progressCallback(progressInfo);
                foreach (var module in manifest.Modules)
                {
                    var moduleSettings = await _settingsManager.GetObjectSettingsAsync(_settingsManager.AllRegisteredSettings.Where(x => x.ModuleId == module.Id).Select(x => x.Name));
                    //Export only settings with set values
                    foreach (var setting in moduleSettings.Where(x => x.ItHasValues))
                    {
                        serializer.Serialize(writer, setting);
                    }
                    await writer.FlushAsync();
                }
                progressInfo.Description = $"Settings of modules exported";
                progressCallback(progressInfo);
                await writer.WriteEndArrayAsync();
            }

            cancellationToken.ThrowIfCancellationRequested();

            await writer.WritePropertyNameAsync("DynamicProperties");
            await writer.WriteStartArrayAsync();
            progressInfo.Description = "Dynamic properties: load properties...";
            progressCallback(progressInfo);
            var dynamicProperties = (await _dynamicPropertySearchService.SearchDynamicPropertiesAsync(new DynamicPropertySearchCriteria { Take = int.MaxValue })).Results;
            foreach (var dynamicProperty in dynamicProperties)
            {
                serializer.Serialize(writer, dynamicProperty);
            }
            progressInfo.Description = $"Dynamic properties exported";
            progressCallback(progressInfo);
            await writer.WriteEndArrayAsync();

            cancellationToken.ThrowIfCancellationRequested();

            await writer.WritePropertyNameAsync("DynamicPropertyDictionaryItems");
            await writer.WriteStartArrayAsync();
            progressInfo.Description = "Dynamic properties Dictionary Items: load properties...";
            progressCallback(progressInfo);
            var dynamicPropertyDictionaryItems = (await _dynamicPropertySearchService.SearchDictionaryItemsAsync(new DynamicPropertyDictionaryItemSearchCriteria { Take = int.MaxValue })).Results;
            foreach (var dynamicPropertyDictionaryItem in dynamicPropertyDictionaryItems)
            {
                serializer.Serialize(writer, dynamicPropertyDictionaryItem);
            }
            progressInfo.Description = $"Dynamic properties dictionary items exported";
            progressCallback(progressInfo);
            await writer.WriteEndArrayAsync();

            await writer.WriteEndObjectAsync();
            await writer.FlushAsync();
        }
    }
}
/// <summary>
/// Exports all marketing data (promotions, dynamic content folders/items/places/publications,
/// coupons and promotion usages) as a single streamed JSON object, paging each collection
/// through the search services in batches of <c>_batchSize</c>.
/// </summary>
/// <param name="outStream">Destination stream; the caller owns its lifetime.</param>
/// <param name="progressCallback">Receives progress descriptions as pages are written.</param>
/// <param name="cancellationToken">Checked up front and forwarded to each paging call.</param>
public async Task DoExportAsync(Stream outStream, Action<ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();

    var progressInfo = new ExportImportProgressInfo { Description = "loading data..." };
    progressCallback(progressInfo);

    using (var sw = new StreamWriter(outStream))
    using (var writer = new JsonTextWriter(sw))
    {
        await writer.WriteStartObjectAsync();

        // Each section below follows the same pattern: announce the section, write the JSON
        // property name, then stream the collection page-by-page via the paging serializer.
        progressInfo.Description = "Promotions exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("Promotions");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, (skip, take) => _promotionSearchService.SearchPromotionsAsync(new PromotionSearchCriteria { Skip = skip, Take = take }), (processedCount, totalCount) =>
        {
            progressInfo.Description = $"{ processedCount } of { totalCount } promotions have been exported";
            progressCallback(progressInfo);
        }, cancellationToken);

        progressInfo.Description = "Dynamic content folders exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("DynamicContentFolders");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, (skip, take) => _dynamicContentSearchService.SearchFoldersAsync(new DynamicContentFolderSearchCriteria { Skip = skip, Take = take }), (processedCount, totalCount) =>
        {
            progressInfo.Description = $"{ processedCount } of { totalCount } dynamic content folders have been exported";
            progressCallback(progressInfo);
        }, cancellationToken);

        progressInfo.Description = "Dynamic content items exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("DynamicContentItems");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, (skip, take) => _dynamicContentSearchService.SearchContentItemsAsync(new DynamicContentItemSearchCriteria { Skip = skip, Take = take }), (processedCount, totalCount) =>
        {
            progressInfo.Description = $"{ processedCount } of { totalCount } dynamic content items have been exported";
            progressCallback(progressInfo);
        }, cancellationToken);

        progressInfo.Description = "Dynamic content places exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("DynamicContentPlaces");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, (skip, take) => _dynamicContentSearchService.SearchContentPlacesAsync(new DynamicContentPlaceSearchCriteria { Skip = skip, Take = take }), (processedCount, totalCount) =>
        {
            progressInfo.Description = $"{ processedCount } of { totalCount } dynamic content places have been exported";
            progressCallback(progressInfo);
        }, cancellationToken);

        progressInfo.Description = "Dynamic content publications exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("DynamicContentPublications");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, (skip, take) => _dynamicContentSearchService.SearchContentPublicationsAsync(new DynamicContentPublicationSearchCriteria { Skip = skip, Take = take }), (processedCount, totalCount) =>
        {
            progressInfo.Description = $"{ processedCount } of { totalCount } dynamic content publications have been exported";
            progressCallback(progressInfo);
        }, cancellationToken);

        progressInfo.Description = "Coupons exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("Coupons");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, (skip, take) => _couponService.SearchCouponsAsync(new CouponSearchCriteria { Skip = skip, Take = take }), (processedCount, totalCount) =>
        {
            progressInfo.Description = $"{ processedCount } of { totalCount } coupons have been exported";
            progressCallback(progressInfo);
        }, cancellationToken);

        progressInfo.Description = "Usages exporting...";
        progressCallback(progressInfo);
        await writer.WritePropertyNameAsync("Usages");
        await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, (skip, take) => _promotionUsageService.SearchUsagesAsync(new PromotionUsageSearchCriteria { Skip = skip, Take = take }), (processedCount, totalCount) =>
        {
            progressInfo.Description = $"{ processedCount } of { totalCount } usages have been exported";
            progressCallback(progressInfo);
        }, cancellationToken);

        await writer.WriteEndObjectAsync();
        await writer.FlushAsync();
    }
}
private async Task ImportModulesInternalAsync(ZipArchive zipArchive, PlatformExportManifest manifest, Action <ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken) { var progressInfo = new ExportImportProgressInfo(); foreach (var moduleInfo in manifest.Modules) { var moduleDescriptor = InnerGetModulesWithInterface(typeof(IImportSupport)).FirstOrDefault(x => x.Id == moduleInfo.Id); if (moduleDescriptor != null) { var modulePart = zipArchive.GetEntry(moduleInfo.PartUri.TrimStart('/')); using (var modulePartStream = modulePart.Open()) { void ModuleProgressCallback(ExportImportProgressInfo x) { progressInfo.Description = $"{moduleInfo.Id}: {x.Description}"; progressInfo.Errors = x.Errors; progressCallback(progressInfo); } if (moduleDescriptor.ModuleInstance is IImportSupport importer) { try { //TODO: Add JsonConverter which will be materialized concrete ExportImport option type var options = manifest.Options .DefaultIfEmpty(new ExportImportOptions { HandleBinaryData = manifest.HandleBinaryData, ModuleIdentity = new ModuleIdentity(moduleDescriptor.Identity.Id, moduleDescriptor.Identity.Version) }) .FirstOrDefault(x => x.ModuleIdentity.Id == moduleDescriptor.Identity.Id); await importer.ImportAsync(modulePartStream, options, ModuleProgressCallback, cancellationToken); } catch (Exception ex) { progressInfo.Errors.Add($"{moduleInfo.Id}: {ex}"); progressCallback(progressInfo); } } } } } }
/// <summary> /// Generates thumbnails asynchronously /// </summary> /// <param name="sourcePath">Path to source picture</param> /// <param name="destPath">Target for generated thumbnail</param> /// <param name="options">Represents generation options</param> /// <param name="token">Allows cancel operation</param> /// <returns></returns> public async Task <ThumbnailGenerationResult> GenerateThumbnailsAsync(string sourcePath, string destPath, IList <ThumbnailOption> options, ICancellationToken token) { token?.ThrowIfCancellationRequested(); var originalImage = await _imageService.LoadImageAsync(sourcePath); if (originalImage == null) { return(new ThumbnailGenerationResult { Errors = { $"Cannot generate thumbnail: {sourcePath} does not have a valid image format" } }); } var result = new ThumbnailGenerationResult(); var format = _imageService.GetImageFormat(originalImage); //one process only can use an Image object at the same time. Image clone; lock (_progressLock) { clone = (Image)originalImage.Clone(); } foreach (var option in options) { var thumbnail = GenerateThumbnail(clone, option); var thumbnailUrl = sourcePath.GenerateThumbnailName(option.FileSuffix); if (thumbnail != null) { await _imageService.SaveImage(thumbnailUrl, thumbnail, format); } else { throw new Exception($"Cannot save thumbnail image {thumbnailUrl}"); } result.GeneratedThumbnails.Add(thumbnailUrl); } return(result); }
private async Task ExportModulesInternalAsync(ZipArchive zipArchive, PlatformExportManifest manifest, Action <ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken) { var progressInfo = new ExportImportProgressInfo(); foreach (var module in manifest.Modules) { var moduleDescriptor = InnerGetModulesWithInterface(typeof(IImportSupport)).FirstOrDefault(x => x.Id == module.Id); if (moduleDescriptor != null) { //Create part for module var moduleZipEntryName = module.Id + ".json"; var zipEntry = zipArchive.CreateEntry(moduleZipEntryName, CompressionLevel.Optimal); void ModuleProgressCallback(ExportImportProgressInfo x) { progressInfo.Description = $"{ module.Id }: { x.Description }"; progressInfo.Errors = x.Errors; progressCallback(progressInfo); } progressInfo.Description = $"{module.Id}: exporting..."; progressCallback(progressInfo); if (moduleDescriptor.ModuleInstance is IExportSupport exporter) { try { //TODO: Add JsonConverter which will be materialized concrete ExportImport option type //ToDo: Added check ExportImportOptions for modules (DefaultIfEmpty) var options = manifest.Options .DefaultIfEmpty(new ExportImportOptions { HandleBinaryData = manifest.HandleBinaryData, ModuleIdentity = new ModuleIdentity(module.Id, module.Version) }) .FirstOrDefault(x => x.ModuleIdentity.Id == moduleDescriptor.Identity.Id); await exporter.ExportAsync(zipEntry.Open(), options, ModuleProgressCallback, cancellationToken); } catch (Exception ex) { progressInfo.Errors.Add($"{ module.Id}: {ex}"); progressCallback(progressInfo); } } module.PartUri = moduleZipEntryName; } } }
protected virtual async Task <IndexingResult> ProcessChangesAsync(IEnumerable <IndexDocumentChange> changes, BatchIndexingOptions batchOptions, ICancellationToken cancellationToken) { cancellationToken.ThrowIfCancellationRequested(); var result = new IndexingResult(); var groups = GetLatestChangesForEachDocumentGroupedByChangeType(changes); foreach (var group in groups) { var changeType = group.Key; var documentIds = group.Value; var groupResult = await ProcessDocumentsAsync(changeType, documentIds, batchOptions, cancellationToken); if (groupResult?.Items != null) { if (result.Items == null) { result.Items = new List <IndexingResultItem>(); } result.Items.AddRange(groupResult.Items); } } return(result); }
public async Task ExportAsync(Stream outStream, PlatformExportManifest manifest, Action <ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken) { if (manifest == null) { throw new ArgumentNullException(nameof(manifest)); } using (var zipArchive = new ZipArchive(outStream, ZipArchiveMode.Create, true)) { //Export all selected platform entries await ExportPlatformEntriesInternalAsync(zipArchive, manifest, progressCallback, cancellationToken); //Export all selected modules await ExportModulesInternalAsync(zipArchive, manifest, progressCallback, cancellationToken); //Write system information about exported modules var manifestZipEntry = zipArchive.CreateEntry(ManifestZipEntryName, CompressionLevel.Optimal); //After all modules exported need write export manifest part using (var stream = manifestZipEntry.Open()) { manifest.SerializeJson(stream, GetJsonSerializer()); } } }
public async Task DoExportAsync(Stream outStream, Action <ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken) { cancellationToken.ThrowIfCancellationRequested(); var progressInfo = new ExportImportProgressInfo { Description = "loading data..." }; progressCallback(progressInfo); using (var sw = new StreamWriter(outStream)) using (var writer = new JsonTextWriter(sw)) { await writer.WriteStartObjectAsync(); progressInfo.Description = "Site maps exporting..."; progressCallback(progressInfo); await writer.WritePropertyNameAsync("Sitemaps"); await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, (skip, take) => _sitemapService.SearchAsync(new SitemapSearchCriteria { Skip = skip, Take = take }), (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } site maps have been exported"; progressCallback(progressInfo); }, cancellationToken); progressInfo.Description = "Site map items exporting..."; progressCallback(progressInfo); await writer.WritePropertyNameAsync("SitemapItems"); await writer.SerializeJsonArrayWithPagingAsync(_jsonSerializer, _batchSize, (skip, take) => _sitemapItemService.SearchAsync(new SitemapItemSearchCriteria { Skip = skip, Take = take }), (processedCount, totalCount) => { progressInfo.Description = $"{ processedCount } of { totalCount } site maps items have been exported"; progressCallback(progressInfo); }, cancellationToken); await writer.WriteEndObjectAsync(); await writer.FlushAsync(); } }
public async Task DoImportAsync(Stream inputStream, Action <ExportImportProgressInfo> progressCallback, ICancellationToken cancellationToken) { cancellationToken.ThrowIfCancellationRequested(); var progressInfo = new ExportImportProgressInfo(); using (var streamReader = new StreamReader(inputStream)) using (var reader = new JsonTextReader(streamReader)) { while (reader.Read()) { if (reader.TokenType == JsonToken.PropertyName) { if (reader.Value.ToString() == "Sitemaps") { await reader.DeserializeJsonArrayWithPagingAsync <Sitemap>(_jsonSerializer, _batchSize, items => _sitemapService.SaveChangesAsync(items.ToArray()), processedCount => { progressInfo.Description = $"{ processedCount } site maps have been imported"; progressCallback(progressInfo); }, cancellationToken); } else if (reader.Value.ToString() == "SitemapItems") { await reader.DeserializeJsonArrayWithPagingAsync <SitemapItem>(_jsonSerializer, _batchSize, items => _sitemapItemService.SaveChangesAsync(items.ToArray()), processedCount => { progressInfo.Description = $"{ processedCount } site maps items have been imported"; progressCallback(progressInfo); }, cancellationToken); } } } } }
private void ProcessStatusUpdates() { List<StatusUpdate> updates = new List<StatusUpdate>(m_statusUpdateQueue); foreach (ClientStatusUpdateConfiguration clientConfig in m_clientStatusUpdateLookup.Values) clientConfig.SendBroadcastUpdates(updates); m_statusUpdateQueue.Clear(); m_queueCancellationToken = null; }
/// <summary>
/// Read historian data from server.
/// </summary>
/// <param name="database">Client database to use for query.</param>
/// <param name="startTime">Start time of query.</param>
/// <param name="stopTime">Stop time of query.</param>
/// <param name="measurementIDs">Measurement IDs to query - or <c>null</c> for all available points.</param>
/// <param name="resolution">Resolution for data query.</param>
/// <param name="seriesLimit">Maximum number of points per series.</param>
/// <param name="forceLimit">Flag that determines if series limit should be strictly enforced.</param>
/// <param name="cancellationToken">Cancellation token for query.</param>
/// <returns>Enumeration of <see cref="TrendValue"/> instances read for time range.</returns>
/// <remarks>
/// This is a lazy iterator: nothing is read until enumeration starts, and the database lock
/// below is held while the consumer enumerates. Results are decimated so each series yields
/// at most roughly <paramref name="seriesLimit"/> points unless <paramref name="forceLimit"/>
/// is false and the point's timestamp gap exceeds the resolution span.
/// </remarks>
public static IEnumerable<TrendValue> GetHistorianData(ClientDatabaseBase<HistorianKey, HistorianValue> database, DateTime startTime, DateTime stopTime, ulong[] measurementIDs, Resolution resolution, int seriesLimit, bool forceLimit, ICancellationToken cancellationToken = null)
{
    // Substitute a token that is never cancelled so the read loop needs no null checks.
    if ((object)cancellationToken == null)
        cancellationToken = new CancellationToken();

    if ((object)database == null)
        yield break;

    TimeSpan resolutionInterval = resolution.GetInterval();
    SeekFilterBase<HistorianKey> timeFilter;
    MatchFilterBase<HistorianKey, HistorianValue> pointFilter = null;
    HistorianKey key = new HistorianKey();
    HistorianValue value = new HistorianValue();

    // Set data scan resolution
    if (resolution == Resolution.Full)
    {
        timeFilter = TimestampSeekFilter.CreateFromRange<HistorianKey>(startTime, stopTime);
    }
    else
    {
        // Align the query window to a baseline (second/minute/hour) matching the resolution
        // so interval sampling starts on an even boundary.
        BaselineTimeInterval interval = BaselineTimeInterval.Second;

        if (resolutionInterval.Ticks < Ticks.PerMinute)
            interval = BaselineTimeInterval.Second;
        else if (resolutionInterval.Ticks < Ticks.PerHour)
            interval = BaselineTimeInterval.Minute;
        else if (resolutionInterval.Ticks == Ticks.PerHour)
            interval = BaselineTimeInterval.Hour;

        startTime = startTime.BaselinedTimestamp(interval);
        stopTime = stopTime.BaselinedTimestamp(interval);

        timeFilter = TimestampSeekFilter.CreateFromIntervalData<HistorianKey>(startTime, stopTime, resolutionInterval, new TimeSpan(TimeSpan.TicksPerMillisecond));
    }

    // Look up measurement metadata from the local output adapter for this database instance.
    Dictionary<ulong, DataRow> metadata = null;
    LocalOutputAdapter historianAdapter;

    if (LocalOutputAdapter.Instances.TryGetValue(database.Info?.DatabaseName ?? DefaultInstanceName, out historianAdapter))
        metadata = historianAdapter?.Measurements;

    if ((object)metadata == null)
        yield break;

    // Setup point ID selections
    if ((object)measurementIDs != null)
        pointFilter = PointIdMatchFilter.CreateFromList<HistorianKey, HistorianValue>(measurementIDs);
    else
        measurementIDs = metadata.Keys.ToArray();

    // Start stream reader for the provided time window and selected points
    Dictionary<ulong, long> pointCounts = new Dictionary<ulong, long>(measurementIDs.Length);
    Dictionary<ulong, long> intervals = new Dictionary<ulong, long>(measurementIDs.Length);
    Dictionary<ulong, ulong> lastTimes = new Dictionary<ulong, ulong>(measurementIDs.Length);
    double range = (stopTime - startTime).TotalSeconds;
    ulong pointID, timestamp, resolutionSpan = (ulong)resolutionInterval.Ticks, baseTicks = (ulong)UnixTimeTag.BaseTicks.Value;
    long pointCount;
    DataRow row;

    if (resolutionSpan <= 1UL)
        resolutionSpan = Ticks.PerSecond;

    if (seriesLimit < 1)
        seriesLimit = 1;

    // Estimate total measurement counts per point so decimation intervals for each series can be calculated.
    // For full resolution the estimate is FramesPerSecond * window length (2 if metadata is missing);
    // otherwise it is the number of resolution intervals that fit in the window.
    foreach (ulong measurementID in measurementIDs)
    {
        if (resolution == Resolution.Full)
            pointCounts[measurementID] = metadata.TryGetValue(measurementID, out row) ? (long)(int.Parse(row["FramesPerSecond"].ToString()) * range) : 2;
        else
            pointCounts[measurementID] = (long)(range / resolutionInterval.TotalSeconds.NotZero(1.0D));
    }

    // Derive a decimation interval per point: keep roughly one of every (estimate / seriesLimit) values.
    foreach (ulong measurementID in pointCounts.Keys)
        intervals[measurementID] = (pointCounts[measurementID] / seriesLimit).NotZero(1L);

    // NOTE(review): the lock is held across yield returns, i.e. for as long as the caller
    // enumerates — confirm consumers do not hold the enumeration open longer than needed.
    lock (database)
    {
        TreeStream<HistorianKey, HistorianValue> stream = database.Read(SortedTreeEngineReaderOptions.Default, timeFilter, pointFilter);

        while (stream.Read(key, value) && !cancellationToken.IsCancelled)
        {
            pointID = key.PointID;
            timestamp = key.Timestamp;

            // pointCounts doubles as a running counter from here on: a value is yielded when the
            // counter lands on the point's decimation interval, or (unless forceLimit) when the
            // gap since the last yielded timestamp exceeds the resolution span.
            pointCount = pointCounts[pointID];

            if (pointCount++ % intervals[pointID] == 0 || (!forceLimit && timestamp - lastTimes.GetOrAdd(pointID, 0UL) > resolutionSpan))
                yield return new TrendValue
                {
                    ID = (long)pointID,
                    // Convert from native ticks to milliseconds since the Unix epoch.
                    Timestamp = (timestamp - baseTicks) / (double)Ticks.PerMillisecond,
                    Value = value.AsSingle
                };

            pointCounts[pointID] = pointCount;
            lastTimes[pointID] = timestamp;
        }
    }
}