Example #1
 private static Task ObserveDiscardedTask(FutureIndexBatch source)
 {
     return source.Task.ContinueWith(task =>
     {
         if (task.Exception != null)
         {
             log.WarnException("Error happened on discarded future work batch", task.Exception);
         }
         else
         {
             log.Warn("WASTE: Discarding future work item without using it, to reduce memory usage");
         }
     });
 }
Example #2
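		// Observe the discarded batch's task so a faulted future batch does not surface
		// as an unobserved task exception; log either the error or the wasted work.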
		private static Task ObserveDiscardedTask(FutureIndexBatch source)
		{
			return source.Task.ContinueWith(task =>
			{
				if (task.Exception != null)
				{
					log.WarnException("Error happened on discarded future work batch", task.Exception);
				}
				else
				{
					log.Warn("WASTE: Discarding future work item without using it, to reduce memory usage");
				}
			});
		}
Example #3
        private bool AddFutureBatch(Etag nextEtag, Etag untilEtag, 
            FutureBatchType batchType, int? docsCount = null)
        {
            var futureBatchStat = new FutureBatchStats
            {
                Timestamp = SystemTime.UtcNow,
                PrefetchingUser = PrefetchingUser
            };
            var sp = Stopwatch.StartNew();
            context.AddFutureBatch(futureBatchStat);

            var docsCountRef = new Reference<int?>() {Value = docsCount};
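            // A per-batch cancellation source, linked to the prefetching context's token so the
            // batch stops either when it is cancelled individually or when the whole context shuts down.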
            var cts = new CancellationTokenSource();
            var linkedToken = CancellationTokenSource.CreateLinkedTokenSource(cts.Token, context.CancellationToken);
            var futureIndexBatch = new FutureIndexBatch
            {
                StartingEtag = nextEtag,
                Age = Interlocked.Increment(ref currentIndexingAge),
                CancellationTokenSource = cts,
                Type = batchType,
                DocsCount = docsCountRef,
                Task = Task.Run(() =>
                {
                    List<JsonDocument> jsonDocuments = null;
                    int localWork = 0;
                    var earlyExit = new Reference<bool>();
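                    // Keep reading from disk until a non-empty batch shows up, counting each
                    // empty round as a retry and waiting for new work between attempts.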
                    while (context.RunIndexing)
                    {
                        linkedToken.Token.ThrowIfCancellationRequested();
                        jsonDocuments = GetJsonDocsFromDisk(
                            linkedToken.Token,
                            Abstractions.Util.EtagUtil.Increment(nextEtag, -1), untilEtag, earlyExit);

                        if (jsonDocuments.Count > 0)
                            break;

                        futureBatchStat.Retries++;

                        context.WaitForWork(TimeSpan.FromMinutes(10), ref localWork, "PreFetching");
                    }

                    futureBatchStat.Duration = sp.Elapsed;
                    futureBatchStat.Size = jsonDocuments == null ? 0 : jsonDocuments.Count;

                    if (jsonDocuments == null)
                        return null;

                    LogEarlyExit(nextEtag, untilEtag, batchType == FutureBatchType.EarlyExit, 
                        jsonDocuments, sp.ElapsedMilliseconds);

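                    // The read stopped early before reaching untilEtag: record how many documents
                    // were fetched and queue another future batch to cover the remaining range.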
                    if (untilEtag != null && earlyExit.Value)
                    {
                        var lastEtag = GetHighestEtag(jsonDocuments);
                        context.TransactionalStorage.Batch(accessor =>
                        {
                            lastEtag = accessor.Documents.GetBestNextDocumentEtag(lastEtag);
                        });

                        if (log.IsDebugEnabled)
                        {
                            log.Debug("Early exit from last future splitted batch, need to fetch documents from etag: {0} to etag: {1}",
                                lastEtag, untilEtag);
                        }

                        linkedToken.Token.ThrowIfCancellationRequested();
                        docsCountRef.Value = jsonDocuments.Count;
                        var docsLeft = docsCount - jsonDocuments.Count;
                        if (docsLeft > 0 && lastEtag.CompareTo(untilEtag) <= 0)
                            AddFutureBatch(lastEtag, untilEtag, FutureBatchType.EarlyExit, docsLeft);
                    }
                    else
                    {
                        linkedToken.Token.ThrowIfCancellationRequested();
                        MaybeAddFutureBatch(jsonDocuments);
                    }
                    return jsonDocuments;
                }, linkedToken.Token)
                .ContinueWith(t =>
                {
                    using (cts)
                    using (linkedToken)
                    {
                        t.AssertNotFailed();
                    }
                    return t.Result;
                })
            };

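            // Report the size of the completed batch, unless cancellation was already requested.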
            futureIndexBatch.Task.ContinueWith(t =>
            {
                try
                {
                    if (linkedToken.IsCancellationRequested == false)
                        FutureBatchCompleted(t.Result.Count);
                }
                catch (ObjectDisposedException)
                {
                    // this is an expected race with the actual task, this is fine
                }
            });

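            // Register the batch by its starting etag; if a future batch already starts there,
            // cancel the one just created instead of running two batches over the same documents.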
            var addFutureBatch = futureIndexBatches.TryAdd(nextEtag, futureIndexBatch);
            if (addFutureBatch == false)
            {
                log.Info(string.Format("A future batch starting with {0} etag is already running", nextEtag));
                cts.Cancel();
            }

            return addFutureBatch;
        }
Example #4
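        // Variant that flags split and early-exit batches with booleans (isSplitted, isEarlyExitBatch)
        // instead of a FutureBatchType value, and stores the linked token source on the batch itself.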
        private bool AddFutureBatch(Etag nextEtag, Etag untilEtag, bool isSplitted = false, bool isEarlyExitBatch = false)
        {
            var futureBatchStat = new FutureBatchStats
            {
                Timestamp = SystemTime.UtcNow,
                PrefetchingUser = PrefetchingUser
            };
            Stopwatch sp = Stopwatch.StartNew();
            context.AddFutureBatch(futureBatchStat);

            var cts = new CancellationTokenSource();
            var linkedToken = CancellationTokenSource.CreateLinkedTokenSource(cts.Token, context.CancellationToken);
            var futureIndexBatch = new FutureIndexBatch
            {
                StartingEtag = nextEtag,
                Age = Interlocked.Increment(ref currentIndexingAge),
                CancellationTokenSource = linkedToken,
                IsSplitted = isSplitted,
                Task = Task.Factory.StartNew(() =>
                {
                    List<JsonDocument> jsonDocuments = null;
                    int localWork = 0;
                    var earlyExit = new Reference<bool>();
                    while (context.RunIndexing)
                    {
                        linkedToken.Token.ThrowIfCancellationRequested();
                        jsonDocuments = GetJsonDocsFromDisk(
                            linkedToken.Token,
                            Abstractions.Util.EtagUtil.Increment(nextEtag, -1), untilEtag, earlyExit);

                        if (jsonDocuments.Count > 0)
                            break;

                        futureBatchStat.Retries++;

                        context.WaitForWork(TimeSpan.FromMinutes(10), ref localWork, "PreFetching");
                    }

                    futureBatchStat.Duration = sp.Elapsed;
                    futureBatchStat.Size = jsonDocuments == null ? 0 : jsonDocuments.Count;

                    if (jsonDocuments == null)
                        return null;

                    LogEarlyExit(nextEtag, untilEtag, isEarlyExitBatch, jsonDocuments, sp.ElapsedMilliseconds);

                    if (untilEtag != null && earlyExit.Value)
                    {
                        var lastEtag = GetHighestEtag(jsonDocuments);
                        context.TransactionalStorage.Batch(accessor =>
                        {
                            lastEtag = accessor.Documents.GetBestNextDocumentEtag(lastEtag);
                        });

                        if (log.IsDebugEnabled)
                        {
                            log.Debug("Early exit from last future splitted batch, need to fetch documents from etag: {0} to etag: {1}",
                                lastEtag, untilEtag);
                        }

                        linkedToken.Token.ThrowIfCancellationRequested();
                        AddFutureBatch(lastEtag, untilEtag, isEarlyExitBatch: true);
                    }
                    else
                    {
                        linkedToken.Token.ThrowIfCancellationRequested();
                        MaybeAddFutureBatch(jsonDocuments);
                    }
                    return jsonDocuments;
                }, linkedToken.Token)
                .ContinueWith(t =>
                {
                    t.AssertNotFailed();
                    return t.Result;
                }, linkedToken.Token)
            };

            futureIndexBatch.Task.ContinueWith(t =>
            {
                FutureBatchCompleted(t.Result.Count);
            }, linkedToken.Token);
            
            return futureIndexBatches.TryAdd(nextEtag, futureIndexBatch);
        }
Example #5
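        // Minimal variant: no cancellation support; the batch task retries until documents arrive
        // and the batch is simply registered by its starting etag.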
        private void AddFutureBatch(Etag nextEtag, Etag untilEtag)
        {
            var futureBatchStat = new FutureBatchStats
            {
                Timestamp       = SystemTime.UtcNow,
                PrefetchingUser = PrefetchingUser
            };
            Stopwatch sp = Stopwatch.StartNew();

            context.AddFutureBatch(futureBatchStat);
            var futureIndexBatch = new FutureIndexBatch
            {
                StartingEtag = nextEtag,
                Age          = Interlocked.Increment(ref currentIndexingAge),
                Task         = Task.Factory.StartNew(() =>
                {
                    List<JsonDocument> jsonDocuments = null;
                    int localWork = 0;
                    var earlyExit = new Reference<bool>();
                    while (context.RunIndexing)
                    {
                        jsonDocuments = GetJsonDocsFromDisk(Abstractions.Util.EtagUtil.Increment(nextEtag, -1), untilEtag, earlyExit);
                        if (jsonDocuments.Count > 0)
                        {
                            break;
                        }

                        futureBatchStat.Retries++;

                        context.WaitForWork(TimeSpan.FromMinutes(10), ref localWork, "PreFetching");
                    }

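                    // Debug diagnostics: log how many documents (and kilobytes) the batch loaded, and
                    // if it looks unusually large or slow, list the ten largest documents by size on disk.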
                    if (log.IsDebugEnabled && jsonDocuments != null)
                    {
                        var size = jsonDocuments.Sum(x => x.SerializedSizeOnDisk) / 1024;
                        log.Debug("Got {0} documents ({3:#,#;;0} kb) in a future batch, starting from etag {1}, took {2:#,#;;0}ms", jsonDocuments.Count, nextEtag, sp.ElapsedMilliseconds,
                                  size);
                        if (size > jsonDocuments.Count * 8 ||
                            sp.ElapsedMilliseconds > 3000)
                        {
                            if (log.IsDebugEnabled)
                            {
                                var topSizes = jsonDocuments
                                               .OrderByDescending(x => x.SerializedSizeOnDisk)
                                               .Take(10)
                                               .Select(x => string.Format("{0} - {1:#,#;;0}kb", x.Key, x.SerializedSizeOnDisk / 1024));

                                log.Debug("Slow load of documents in batch, maybe large docs? Top 10 largest docs are: ({0})", string.Join(", ", topSizes));
                            }
                        }
                    }

                    futureBatchStat.Duration = sp.Elapsed;
                    futureBatchStat.Size     = jsonDocuments == null ? 0 : jsonDocuments.Count;

                    if (jsonDocuments == null)
                    {
                        return null;
                    }

                    if (untilEtag != null && earlyExit.Value)
                    {
                        var lastEtag = GetHighestEtag(jsonDocuments);
                        context.TransactionalStorage.Batch(accessor =>
                        {
                            lastEtag = accessor.Documents.GetBestNextDocumentEtag(lastEtag);
                        });

                        if (log.IsDebugEnabled)
                        {
                            log.Debug("Early exit from last future splitted batch, need to fetch documents from etag: {0} to etag: {1}",
                                      lastEtag, untilEtag);
                        }
                        AddFutureBatch(lastEtag, untilEtag);
                    }
                    else
                    {
                        MaybeAddFutureBatch(jsonDocuments);
                    }
                    return jsonDocuments;
                }).ContinueWith(t =>
                {
                    t.AssertNotFailed();
                    return t.Result;
                })
            };

            futureIndexBatch.Task.ContinueWith(t =>
            {
                FutureBatchCompleted(t.Result.Count);
            });

            futureIndexBatches.TryAdd(nextEtag, futureIndexBatch);
        }