Example #1
        /// <summary>
        /// Downloads a file in multiple parts via a TPL Dataflow pipeline and writes the result to disk.
        /// </summary>
        /// <param name="url">The URL of the resource to download.</param>
        /// <param name="parts">Number of parts to download the file as.</param>
        /// <param name="outFile">Output file name; auto-generated from the URL if null.</param>
        /// <returns>A task that completes once the download has finished.</returns>
        public async Task DownloadFile(string url, double parts, string outFile = null)
        {
            #region Variables

            EventfulConcurrentQueue <FileChunk> asyncTasks;
            TransformManyBlock <IEnumerable <FileChunk>, FileChunk> getFileChunk;
            TransformBlock <FileChunk, Tuple <Task <HttpResponseMessage>, FileChunk> > getStream;
            ActionBlock <Tuple <Task <HttpResponseMessage>, FileChunk> > writeStream;
            //Get response length
            _responseLength = (await WebRequest.Create(url).GetResponseAsync()).ContentLength;
            //Calculate Part size
            long partSize = (long)Math.Round(_responseLength / parts);
            //Get the content ranges to download
            var pieces = GetChunkList(partSize, _responseLength);
            //Convert the URL string to a Uri
            Uri uri = new Uri(url);
            //Output file name: use outFile if given, otherwise derive it from the URL
            string filename = outFile ?? Path.GetFileName(uri.LocalPath);

            #endregion

            //Console.WriteLine(_responseLength + " TOTAL SIZE");
            //Console.WriteLine(partSize + " PART SIZE" + "\n");

            Progress.Detail = $"文件总大小:{_responseLength}字节";

            //Set max threads to those supported by system
            SetMaxThreads();

            try
            {
                //Using custom concurrent queue to implement Enqueue and Dequeue Events
                asyncTasks = GetTaskList(parts);

                Progress.Detail = "缓存完成";

                //TransformManyBlock flattens the IEnumerable<FileChunk> into individual FileChunk items
                getFileChunk =
                    new TransformManyBlock <IEnumerable <FileChunk>, FileChunk>(chunk => chunk,
                                                                                new ExecutionDataflowBlockOptions());

                //Gets the request stream from the filechunk
                getStream = new TransformBlock <FileChunk, Tuple <Task <HttpResponseMessage>, FileChunk> >(piece =>
                {
                    var newTask = GetStreamTask(piece, _responseLength, uri, asyncTasks);
                    return newTask;
                }, new ExecutionDataflowBlockOptions
                {
                    BoundedCapacity        = Environment.ProcessorCount, // Cap the item count
                    MaxDegreeOfParallelism = Environment.ProcessorCount  // Parallelize on all cores
                });

                //Writes the response stream for each chunk to its temp file
                writeStream = new ActionBlock <Tuple <Task <HttpResponseMessage>, FileChunk> >(async task =>
                {
                    //Await the response rather than blocking on task.Item1.Result
                    HttpResponseMessage response = await task.Item1;

                    using (Stream streamToRead = await response.Content.ReadAsStreamAsync())
                    {
                        using (FileStream fileToWriteTo = File.Open(task.Item2.TempFileName, FileMode.OpenOrCreate,
                                                                    FileAccess.ReadWrite, FileShare.ReadWrite))
                        {
                            fileToWriteTo.Position = 0;
                            await streamToRead.CopyToAsync(fileToWriteTo, (int)partSize, CancellationToken.None);
                        }

                        //Mark this chunk as done and remove it from the queue
                        Interlocked.Add(ref _tasksDone, 1);
                        asyncTasks.TryDequeue(out FileChunk _);
                    }

                    GC.Collect(0, GCCollectionMode.Forced);
                }, new ExecutionDataflowBlockOptions
                {
                    BoundedCapacity        = Environment.ProcessorCount, // Cap the item count
                    MaxDegreeOfParallelism = Environment.ProcessorCount  // Parallelize on all cores
                });

                //Propagate completion and faults down the pipeline
                DataflowLinkOptions linkOptions = new DataflowLinkOptions {
                    PropagateCompletion = true
                };

                //Build the data flow pipeline
                getFileChunk.LinkTo(getStream, linkOptions);
                getStream.LinkTo(writeStream, linkOptions);

                //Post the file pieces
                getFileChunk.Post(pieces);
                getFileChunk.Complete();

                //Wait for the write stage of the pipeline to complete
                await writeStream.Completion.ContinueWith(task =>
                {
                    //If all the tasks are done, Join the temp files
                    if (asyncTasks.Count == 0)
                    {
                        CombineMultipleFilesIntoSingleFile(pieces, filename);
                    }
                }, CancellationToken.None, TaskContinuationOptions.OnlyOnRanToCompletion, TaskScheduler.Current);
            }
            catch (Exception ex)
            {
                Progress.Detail = "发生错误:" + ex.Message;

                //Delete the temp files if there's an error
                foreach (FileChunk piece in pieces)
                {
                    try
                    {
                        File.Delete(piece.TempFileName);
                    }
                    catch (FileNotFoundException)
                    {
                    }
                }
            }

            Progress.TriggerComplete(this, EventArgs.Empty);
        }
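GetChunkList, GetStreamTask, SetMaxThreads, CombineMultipleFilesIntoSingleFile and EventfulConcurrentQueue are members of the enclosing downloader class and are not included in this listing. As a rough sketch only, a chunk-list helper consistent with the inline range loop in Example #2 below might look like this (assuming FileChunk carries integer Start and End values; the signature and names are illustrative, not the actual implementation):

        //Illustrative sketch only: the real GetChunkList helper is not shown in this listing
        private List<FileChunk> GetChunkList(long partSize, long responseLength)
        {
            //Build byte ranges of roughly partSize bytes each,
            //with the final chunk running to the end of the response
            var pieces   = new List<FileChunk>();
            int previous = 0;

            for (int i = (int)partSize; i <= responseLength; i += (int)partSize)
            {
                int end = i + partSize < responseLength ? i : (int)responseLength;
                pieces.Add(new FileChunk(previous, end));
                previous = i;
            }

            return pieces;
        }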
Example #2
        public async Task <byte[]> DownloadByteArray(string url, double parts)
        {
            _responseLength = (await WebRequest.Create(url).GetResponseAsync()).ContentLength;
            long partSize = (long)Math.Floor(_responseLength / parts);
            var  pieces   = new List <FileChunk>();

            ThreadPool.GetMaxThreads(out int maxWorkerThreads,
                                     out int maxConcurrentActiveRequests);

            bool changeSucceeded = ThreadPool.SetMaxThreads(
                maxWorkerThreads, maxConcurrentActiveRequests);

            //Console.WriteLine(responseLength + " TOTAL SIZE");
            //Console.WriteLine(partSize + " PART SIZE" + "\n");

            Progress.Detail = $"文件总大小:{_responseLength}字节";

            try
            {
                using MemoryStream ms = new MemoryStream();
                ms.SetLength(_responseLength);

                //Using custom concurrent queue to implement Enqueue and Dequeue Events
                var asyncTasks = new EventfulConcurrentQueue <FileChunk>();

                //Delegate for Dequeue
                asyncTasks.ItemDequeued += delegate
                {
                    //_tasksDone counts completed operations; divide by parts * 2
                    //because each part triggers one Enqueue AND one Dequeue
                    Progress.Percentage = _tasksDone / (parts * 2);
                };

                //Delegate for Enqueue
                asyncTasks.ItemEnqueued += delegate
                {
                    Progress.Percentage = _tasksDone / (parts * 2);
                };

                // GetResponseAsync deadlocks for some reason so switched to HttpClient instead
                HttpClient client = new HttpClient(
                    //Use our custom Retry handler, with a max retry value of 10
                    new RetryHandler(new HttpClientHandler(), 10))
                {
                    MaxResponseContentBufferSize = 1000000000
                };

                client.DefaultRequestHeaders.ConnectionClose = false;
                client.Timeout = Timeout.InfiniteTimeSpan;

                //Variable to hold the old loop end
                int previous = 0;

                //Loop to build the list of chunk ranges
                for (int i = (int)partSize; i <= _responseLength; i += (int)partSize)
                {
                    Progress.Detail = "Writing to cache...";

                    //Start and end values for the chunk; the last chunk runs to the end of the response
                    int start      = previous;
                    int currentEnd = i + partSize < _responseLength ? i : (int)_responseLength;

                    pieces.Add(new FileChunk(start, currentEnd));

                    //Set the start of the next loop to be the current end
                    previous = i;
                }

                var getFileChunk = new TransformManyBlock <IEnumerable <FileChunk>, FileChunk>(chunk => chunk,
                                                                                               new ExecutionDataflowBlockOptions
                {
                    BoundedCapacity        = int.MaxValue,              // Effectively unbounded
                    MaxDegreeOfParallelism = Environment.ProcessorCount // Parallelize on all cores
                });

                var getStream = new TransformBlock <FileChunk, Tuple <Task <HttpResponseMessage>, FileChunk> >(
                    piece =>
                {
                    Progress.Detail = "正在下载……";

                    //Open a http request with the range
                    HttpRequestMessage request = new HttpRequestMessage {
                        RequestUri = new Uri(url)
                    };
                    request.Headers.Range = new RangeHeaderValue(piece.Start, piece.End);

                    //Send the request
                    var downloadTask = client.SendAsync(request, HttpCompletionOption.ResponseContentRead);

                    //Use interlocked to increment Tasks done by one
                    Interlocked.Add(ref _tasksDone, 1);
                    asyncTasks.Enqueue(piece);

                    return new Tuple <Task <HttpResponseMessage>, FileChunk>(downloadTask, piece);
                }, new ExecutionDataflowBlockOptions
                {
                    BoundedCapacity        = (int)parts,                // Cap the item count
                    MaxDegreeOfParallelism = Environment.ProcessorCount // Parallelize on all cores
                });

                var writeStream = new ActionBlock <Tuple <Task <HttpResponseMessage>, FileChunk> >(async tuple =>
                {
                    var buffer = new byte[tuple.Item2.End - tuple.Item2.Start];

                    //Await the response rather than blocking on tuple.Item1.Result
                    HttpResponseMessage response = await tuple.Item1;

                    using (Stream stream = await response.Content.ReadAsStreamAsync())
                    {
                        //ReadAsync may return fewer bytes than requested, so read until the buffer is filled
                        int offset = 0;
                        while (offset < buffer.Length)
                        {
                            int bytesRead = await stream.ReadAsync(buffer, offset, buffer.Length - offset);
                            if (bytesRead == 0)
                            {
                                break;
                            }
                            offset += bytesRead;
                        }
                    }

                    //Copy the chunk into its position in the shared memory stream
                    lock (ms)
                    {
                        ms.Position = tuple.Item2.Start;
                        ms.Write(buffer, 0, buffer.Length);
                    }

                    asyncTasks.TryDequeue(out FileChunk _);
                    Interlocked.Add(ref _tasksDone, 1);
                }, new ExecutionDataflowBlockOptions
                {
                    BoundedCapacity        = (int)parts,                // Cap the item count
                    MaxDegreeOfParallelism = Environment.ProcessorCount // Parallelize on all cores
                });

                DataflowLinkOptions linkOptions = new DataflowLinkOptions {
                    PropagateCompletion = true
                };

                getFileChunk.LinkTo(getStream, linkOptions);
                getStream.LinkTo(writeStream, linkOptions);

                getFileChunk.Post(pieces);
                getFileChunk.Complete();

                await writeStream.Completion.ContinueWith(task =>
                {
                    if (asyncTasks.Count != 0)
                    {
                        return;
                    }
                    ms.Flush();
                    ms.Close();
                    //onComplete?.Invoke(ms.ToArray());
                });

                Progress.TriggerComplete(this, EventArgs.Empty);

                return ms.ToArray();
            }
            catch (Exception ex)
            {
                Progress.Detail = "发生错误:" + ex.Message;
            }

            Progress.TriggerComplete(this, EventArgs.Empty);

            return null;
        }
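A minimal usage sketch, assuming the two methods above live on an instantiable downloader class (the class name Downloader, the URLs and the output file name below are placeholders, not part of the original listing; using System.IO and System.Threading.Tasks are assumed):

        //Hypothetical caller: downloads one resource to disk and another into memory
        public static async Task RunExample()
        {
            var downloader = new Downloader(); //assumed name of the enclosing class

            //Example #1: multipart download written straight to disk
            //(file name is derived from the URL because outFile is omitted)
            await downloader.DownloadFile("https://example.com/large.zip", parts: 8);

            //Example #2: multipart download into memory, then persisted manually
            byte[] data = await downloader.DownloadByteArray("https://example.com/large.zip", parts: 8);
            if (data != null)
            {
                File.WriteAllBytes("large-copy.zip", data);
            }
        }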