Example #1
        // needed for Unit Testing
        internal static void StartDownloading(CancellationToken ct, FailureToken ft,
            ILargeFileDownloadParameters parameters, 
            IAsyncProgress<LargeFileDownloadProgressChangedEventArgs> progress = null,
            Action<string> logger = null,
            BufferManager bufferManager = null)
        {
            //create the file
            Stream stream = parameters.GetOutputStream();
            if (parameters.FileSize == 0) // zero-byte file: report completion and return immediately
            {
                if (progress != null)
                {
                    progress.Report(new LargeFileDownloadProgressChangedEventArgs(100, 0, 0,
                                                                                  parameters.FileSize,
                                                                                  parameters.FileSize, "",
                                                                                  "",
                                                                                  null));
                }
                if (parameters.AutoCloseStream)
                    stream.Close();
                return;
            }

            //figure out number of chunks
            int chunkCount = GetChunkCount(parameters.FileSize, parameters.MaxChunkSize);
            int numberOfThreads = Math.Min(parameters.MaxThreads, chunkCount);
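            // default to a no-op logger so the logger(...) calls below never need a null check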
            logger = logger ?? ((s) => { });

            var downloadWorkers = new List<Downloader>(numberOfThreads);
            var chunksWritten = new Dictionary<int, bool>();
            bool isFailed = false;
            long totalBytesWritten = 0;
            double byteWriteRate = 0.0;
            try
            {

                var readStack = new ConcurrentStack<int>();

                //add all of the chunks to the stack
                var rangeArray = Enumerable.Range(0, chunkCount).Reverse().ToArray();
                readStack.PushRange(rangeArray);
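                // chunksWritten records, per chunk index, whether that chunk has been flushed to the output stream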
                chunksWritten = readStack.ToDictionary(k => k, v => false);

                var writeQueue = new ConcurrentQueue<ChunkedFilePart>();

                // ReSharper disable AccessToModifiedClosure
                Func<int, bool> downloadThrottle = (int c) => writeQueue.Count > 30;
                // ReSharper restore AccessToModifiedClosure
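                // the throttle above signals workers to pause fetching new chunks while more than 30 parts are queued for writing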
                if (bufferManager == null)
                {

                    bufferManager = new BufferManager(new[]
                    {
                        new BufferQueueSetting(SimpleHttpGetByRangeClient.BUFFER_SIZE, (uint) numberOfThreads),
                        new BufferQueueSetting((uint) parameters.MaxChunkSize, (uint) numberOfThreads)
                    });
                }

                int expectedChunkDownloadTime = ExpectedDownloadTimeInSeconds(parameters.MaxChunkSize);
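                // a worker whose heartbeat is older than this many seconds is treated as hung and aborted below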

                for (int i = 0; i < numberOfThreads; i++)
                {
                    downloadWorkers.Add(new Downloader(bufferManager, parameters, writeQueue, readStack,
                                        downloadThrottle, expectedChunkDownloadTime, ft, logger, ct));
                }
                //start all the download threads
                downloadWorkers.ForEach(x => x.Start());

                var watch = new System.Diagnostics.Stopwatch();
                watch.Start();
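                // baselines for the throttled progress reports and the stale-write check below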
                long oldElapsedMilliSeconds = watch.ElapsedMilliseconds;
                DateTime lastWriteTime = DateTime.MaxValue;
                long lastPointInFile = 0;
                int kc = 0; // count of aborted (timed-out) worker threads, used only in the log message below
                //start the write loop
                while (chunksWritten.Any(kvp => !kvp.Value) && !ct.IsCancellationRequested && !ft.FailureDetected)
                {
                    ChunkedFilePart part;
                    while (writeQueue.TryDequeue(out part) && !ft.FailureDetected)
                    {
                        //retry?
                        logger(string.Format("[{1}] writing chunk: {0}", part.Chunk, parameters.Id));
                        stream.Position = part.FileOffset;
                        stream.Write(part.Content, 0, part.Length);
                        totalBytesWritten += part.Length;
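                        // return the chunk's buffer to the pool now that it has been written to disk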
                        bufferManager.FreeBuffer(part.Content);
                        chunksWritten[part.Chunk] = true;
                        lastWriteTime = DateTime.Now;
                        if (progress != null)
                        {
                            var elapsed = watch.ElapsedMilliseconds;
                            var diff = elapsed - oldElapsedMilliSeconds;
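                            // throttle progress reports to at most one every two seconds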
                            if (diff > 2000)
                            {
                                long bytesDownloaded = (long)chunksWritten.Count(kvp => kvp.Value) * parameters.MaxChunkSize;
                                long interimReads = bytesDownloaded + part.Length - lastPointInFile;
                                byteWriteRate = (interimReads / (diff / (double)1000));

                                lastPointInFile += interimReads;
                                oldElapsedMilliSeconds = elapsed;
                                progress.Report(new LargeFileDownloadProgressChangedEventArgs(ComputeProgressIndicator(totalBytesWritten, parameters.FileSize),
                                                                                              byteWriteRate, byteWriteRate, totalBytesWritten, totalBytesWritten, "", "", null));
                            }
                        }
                    }

                    // kill hung workers
                    var timedOutWorkers = downloadWorkers
                        .Where(w => w.Status == ThreadState.Running || w.Status == ThreadState.WaitSleepJoin)
                        .Where(w =>
                        {
                            if (w.SimulateTimedOut)
                                return true;
                            return w.HeartBeat.AddSeconds(expectedChunkDownloadTime) < DateTime.Now;
                        })
                        .ToList();

                    if (timedOutWorkers.Any())
                    {
                        foreach (var worker in timedOutWorkers)
                        {
                            try
                            {
                                worker.DownloadWorkerThread.Abort(); // this has a minute chance of throwing
                                logger(string.Format("[{1}] killing thread as it timed out {0}", kc++, parameters.Id));
                                if (worker.SimulateTimedOut)
                                    Thread.Sleep(3000); // introduce delay for unit test to pick-up the condition
                            }
                            catch (Exception)
                            { }
                        }
                    }

                    var activeWorkers = downloadWorkers.Where(x => x != null &&
                        (x.Status == ThreadState.Running
                        || x.Status == ThreadState.WaitSleepJoin)).ToList();
                    // re-spawn the missing workers if some had too many retries or were killed

                    if (NeedToCheckForUnwrittenChunks(readStack, lastWriteTime, STALE_WRITE_CHECK_MINUTES))
                    {
                        // if there are any parts remaining to be written, AND the read stack is empty
                        var unreadParts = chunksWritten.Where(kvp => !kvp.Value);
                        if (readStack.IsEmpty && unreadParts.Any() && !ft.FailureDetected)
                        {
                            logger(string.Format("read stack is empty, but there remains unwritten parts!  Adding {0} parts back to read stack.", unreadParts.Count()));
                            readStack.Push(unreadParts.Select(kvp => kvp.Key).First());
                        }

                        lastWriteTime = DateTime.Now; // don't check again for a while
                    }

                    // pause briefly so workers can queue more parts
                    Thread.Sleep(100);
                    if (activeWorkers.Count() < numberOfThreads)
                    {
                        for (int i = 0; i < numberOfThreads; i++)
                        {
                            if (downloadWorkers[i] == null)
                            {
                                logger(string.Format("[{0}] reviving killed thread", parameters.Id));
                                downloadWorkers[i] = new Downloader(bufferManager, parameters, writeQueue, readStack,
                                downloadThrottle, expectedChunkDownloadTime, ft, logger, ct);
                                downloadWorkers[i].Start();
                                continue;
                            }

                            if (downloadWorkers[i].Status == ThreadState.Running
                                || downloadWorkers[i].Status == ThreadState.WaitSleepJoin
                                || downloadWorkers[i].Status == ThreadState.Background
                                || downloadWorkers[i].Status == ThreadState.Stopped) continue;

                            logger(string.Format("[{0}] reviving killed thread", parameters.Id));
                            downloadWorkers[i] = new Downloader(bufferManager, parameters, writeQueue, readStack,
                                                                downloadThrottle, expectedChunkDownloadTime, ft, logger, ct);
                            downloadWorkers[i].Start();
                        }
                    }

                }

                if (ft.FailureDetected)
                {
                    throw new Exception(string.Format("[{0}] A non-retryable failure was reported by one or more of the download workers.", parameters.Id));
                }
            }
            catch (Exception e)
            {
                // Report Failure
                isFailed = true;
                logger(string.Format("[{0}] Exception: TerminalVelocity Downloading failed.", parameters.Id));
                logger(string.Format("[{0}] Message: {1} ", parameters.Id, e.Message));
                logger(string.Format("[{0}] StackTrace: {1}", parameters.Id, e.StackTrace));
                if (progress != null)
                {
                    progress.Report(new LargeFileDownloadProgressChangedEventArgs(ComputeProgressIndicator(totalBytesWritten, parameters.FileSize), 0, 0, totalBytesWritten, totalBytesWritten, "", "", null, isFailed, e.Message));
                }
            }
            finally
            {
                // dispose all download workers, if any exist
                if (downloadWorkers != null)
                {
                    downloadWorkers.ForEach(x =>
                    {
                        if (x == null) return;

                        ExecuteAndSquash(x.Dispose);
                    });
                }
                if (parameters.AutoCloseStream)
                {
                    if (progress != null)
                    {
                        progress.Report(new LargeFileDownloadProgressChangedEventArgs(ComputeProgressIndicator(totalBytesWritten, parameters.FileSize), byteWriteRate, byteWriteRate, totalBytesWritten, totalBytesWritten, "", "", null, isFailed));
                    }
                    logger(string.Format("[{0}] AutoClosing stream", parameters.Id));
                    stream.Close();
                }
            }
        }
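
A minimal sketch of how the method above might be driven from calling code. Only the StartDownloading signature, the ILargeFileDownloadParameters interface, and the FailureToken type come from the example itself; the RunDownload wrapper and the assumption that FailureToken has a parameterless constructor are illustrative.

        // Usage sketch: RunDownload is hypothetical; it only wires up the tokens
        // and a console logger around the StartDownloading call shown above.
        internal static void RunDownload(ILargeFileDownloadParameters parameters)
        {
            using (var cts = new CancellationTokenSource())
            {
                var failureToken = new FailureToken(); // assumed parameterless constructor
                StartDownloading(cts.Token, failureToken, parameters,
                                 logger: message => Console.WriteLine(message));
            }
        }

The call blocks until every chunk has been written, the CancellationToken is cancelled, or a worker raises the FailureToken, so callers that need asynchrony would typically run it on a background task.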