Example No. 1
        private Tuple <Task <HttpResponseMessage>, FileChunk> GetStreamTask(FileChunk piece, long responseLength, Uri uri,
                                                                            EventfulConcurrentQueue <FileChunk> asyncTasks)
        {
            using var wcObj = _wcPool.Get();

            Progress.Detail = "Downloading...";

            //Open an HTTP request with the range
            HttpRequestMessage request = new HttpRequestMessage {
                RequestUri = uri
            };

            request.Headers.ConnectionClose = false;
            request.Headers.Range           = new RangeHeaderValue(piece.Start, piece.End);

            //Send the request
            var downloadTask = wcObj.Value.SendAsync(request, HttpCompletionOption.ResponseContentRead,
                                                     CancellationToken.None);

            //Use interlocked to increment Tasks done by one
            Interlocked.Add(ref _tasksDone, 1);
            asyncTasks.Enqueue(piece);

            return new Tuple <Task <HttpResponseMessage>, FileChunk>(downloadTask, piece);
        }
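
For reference, a consumer of the returned tuple would typically await the response task and copy the ranged body into the correct offset of a shared buffer, mirroring what the writeStream block does in Example No. 2. The sketch below is not part of the original listing; the WriteChunkAsync name and the shared MemoryStream parameter are illustrative assumptions, while FileChunk and the tuple shape come from the code above.

        //Hypothetical consumer of GetStreamTask, shown for illustration only
        private async Task WriteChunkAsync(Tuple <Task <HttpResponseMessage>, FileChunk> work, MemoryStream output)
        {
            //Wait for the ranged response, then buffer the chunk body
            HttpResponseMessage response = await work.Item1;
            byte[] bytes = await response.Content.ReadAsByteArrayAsync();

            //The output stream is shared between chunks, so serialise access to it
            lock (output)
            {
                output.Position = work.Item2.Start;
                output.Write(bytes, 0, bytes.Length);
            }
        }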
Example No. 2
        public async Task <byte[]> DownloadByteArray(string url, double parts)
        {
            _responseLength = (await WebRequest.Create(url).GetResponseAsync()).ContentLength;
            long partSize = (long)Math.Floor(_responseLength / parts);
            var  pieces   = new List <FileChunk>();

            ThreadPool.GetMaxThreads(out int maxWorkerThreads,
                                     out int maxConcurrentActiveRequests);

            bool changeSucceeded = ThreadPool.SetMaxThreads(
                maxWorkerThreads, maxConcurrentActiveRequests);

            //Console.WriteLine(responseLength + " TOTAL SIZE");
            //Console.WriteLine(partSize + " PART SIZE" + "\n");

            Progress.Detail = $"Total file size: {_responseLength} bytes";

            try
            {
                using MemoryStream ms = new MemoryStream();
                ms.SetLength(_responseLength);

                //Using a custom concurrent queue that raises Enqueue and Dequeue events
                var asyncTasks = new EventfulConcurrentQueue <FileChunk>();

                //Delegate for Dequeue
                asyncTasks.ItemDequeued += delegate
                {
                    //_tasksDone counts completed operations; divide by parts * 2
                    //because every part is enqueued once AND dequeued once
                    Progress.Percentage = _tasksDone / (parts * 2);
                };

                //Delegate for Enqueue
                asyncTasks.ItemEnqueued += delegate
                {
                    Progress.Percentage = _tasksDone / (parts * 2);
                };

                // GetResponseAsync deadlocks for some reason so switched to HttpClient instead
                HttpClient client = new HttpClient(
                    //Use our custom Retry handler, with a max retry value of 10
                    new RetryHandler(new HttpClientHandler(), 10))
                {
                    MaxResponseContentBufferSize = 1000000000
                };

                client.DefaultRequestHeaders.ConnectionClose = false;
                client.Timeout = Timeout.InfiniteTimeSpan;

                //Variable to hold the old loop end
                int previous = 0;

                //Loop to split the download into chunks and add them to the list
                for (int i = (int)partSize; i <= _responseLength; i += (int)partSize)
                {
                    Progress.Detail = "Writing to cache...";

                    if (i + partSize < _responseLength)
                    {
                        //Start and end values for the chunk
                        int start      = previous;
                        int currentEnd = i;

                        pieces.Add(new FileChunk(start, currentEnd));

                        //Set the start of the next loop to be the current end
                        previous = currentEnd;
                    }
                    else
                    {
                        //Start and end values for the chunk
                        int start      = previous;
                        int currentEnd = i;

                        pieces.Add(new FileChunk(start, (int)_responseLength));

                        //Set the start of the next loop to be the current end
                        previous = currentEnd;
                    }
                }
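
                //The download itself runs as a three-stage TPL Dataflow pipeline:
                //getFileChunk fans the chunk list out into individual FileChunk items,
                //getStream issues one ranged HTTP request per chunk, and
                //writeStream copies each response into the shared MemoryStream.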

                var getFileChunk = new TransformManyBlock <IEnumerable <FileChunk>, FileChunk>(chunk => chunk,
                                                                                               new ExecutionDataflowBlockOptions
                {
                    BoundedCapacity        = int.MaxValue,              // Cap the item count
                    MaxDegreeOfParallelism = Environment.ProcessorCount // Parallelize on all cores
                });

                var getStream = new TransformBlock <FileChunk, Tuple <Task <HttpResponseMessage>, FileChunk> >(
                    piece =>
                {
                    Progress.Detail = "Downloading...";

                    //Open an HTTP request with the range
                    HttpRequestMessage request = new HttpRequestMessage {
                        RequestUri = new Uri(url)
                    };
                    request.Headers.Range = new RangeHeaderValue(piece.Start, piece.End);

                    //Send the request
                    var downloadTask = client.SendAsync(request, HttpCompletionOption.ResponseContentRead);

                    //Use interlocked to increment Tasks done by one
                    Interlocked.Add(ref _tasksDone, 1);
                    asyncTasks.Enqueue(piece);

                    return new Tuple <Task <HttpResponseMessage>, FileChunk>(downloadTask, piece);
                }, new ExecutionDataflowBlockOptions
                {
                    BoundedCapacity        = (int)parts,                // Cap the item count
                    MaxDegreeOfParallelism = Environment.ProcessorCount // Parallelize on all cores
                }
                    );

                var writeStream = new ActionBlock <Tuple <Task <HttpResponseMessage>, FileChunk> >(async tuple =>
                {
                    var buffer = new byte[tuple.Item2.End - tuple.Item2.Start];

                    //Await the response rather than blocking on .Result inside the async block
                    HttpResponseMessage response = await tuple.Item1;

                    using (Stream stream = await response.Content.ReadAsStreamAsync())
                    {
                        //A single ReadAsync may return fewer bytes than requested,
                        //so keep reading until the whole chunk has arrived
                        int offset = 0;
                        while (offset < buffer.Length)
                        {
                            int read = await stream.ReadAsync(buffer, offset, buffer.Length - offset);
                            if (read == 0)
                            {
                                break;
                            }
                            offset += read;
                        }
                    }

                    lock (ms)
                    {
                        ms.Position = tuple.Item2.Start;
                        ms.Write(buffer, 0, buffer.Length);
                    }

                    //Dequeue the finished chunk and count it as done
                    asyncTasks.TryDequeue(out _);
                    Interlocked.Add(ref _tasksDone, 1);
                }, new ExecutionDataflowBlockOptions
                {
                    BoundedCapacity        = (int)parts,                // Cap the item count
                    MaxDegreeOfParallelism = Environment.ProcessorCount // Parallelize on all cores
                });

                DataflowLinkOptions linkOptions = new DataflowLinkOptions {
                    PropagateCompletion = true
                };
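
                //Because the links propagate completion, calling Complete() on
                //getFileChunk flows through getStream to writeStream, so awaiting
                //writeStream.Completion waits for the whole pipeline to drain.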

                getFileChunk.LinkTo(getStream, linkOptions);
                getStream.LinkTo(writeStream, linkOptions);

                getFileChunk.Post(pieces);
                getFileChunk.Complete();

                await writeStream.Completion.ContinueWith(task =>
                {
                    if (asyncTasks.Count != 0)
                    {
                        return;
                    }
                    ms.Flush();
                    ms.Close();
                    //onComplete?.Invoke(ms.ToArray());
                });

                Progress.TriggerComplete(this, EventArgs.Empty);

                return ms.ToArray();
            }
            catch (Exception ex)
            {
                Progress.Detail = "An error occurred: " + ex.Message;
            }

            Progress.TriggerComplete(this, EventArgs.Empty);

            return null;
        }
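
A call site for DownloadByteArray might look like the following. The enclosing class is not shown in the listing, so the Downloader name, the URL, and the output file name are placeholder assumptions; only the method signature and its null-on-error behaviour are taken from the code above.

        //Hypothetical call site; the Downloader class name, URL and file name are placeholders
        public static async Task SaveFileAsync()
        {
            var downloader = new Downloader();

            //Request the file in 8 ranged parts; null is returned if the download failed
            byte[] data = await downloader.DownloadByteArray("https://example.com/file.bin", 8);

            if (data != null)
            {
                File.WriteAllBytes("file.bin", data);
            }
        }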