public Task<long> CalculateParallelPerByte(string filename)
{
    return Task.Run(() =>
    {
        try
        {
            this.filename = filename;
            var dt = DateTime.Now;
            fileLength = GetLength(filename);
            partSize = GetBlockSize(fileLength); // fallback block size used elsewhere: 1048576

            // Ceiling division so a trailing partial block is not skipped
            // (rounding AwayFromZero drops remainders smaller than half a block).
            iterations = (long)Math.Ceiling(fileLength / (decimal)partSize);

            reader = new FileStream(filename, FileMode.Open);
            Parallel.For(0, iterations, i => { Calculate(i); });

            string str = $"File: {Path.GetFileName(filename)} value: {res} time: {(DateTime.Now - dt).TotalSeconds}, length: {fileLength}";
            var fv = new FileValue() { Params = str, FilePath = filename, Summ = res };
            ProcessNotifier?.Invoke(str, 100, 100);
            return res;
        }
        catch (Exception ex)
        {
            //throw new CalculateFileException($"Failed to calculate file: {Path.GetFileName(filename)}", ex);
            string str = "Failed to calculate file: " + Path.GetFileName(filename);
            //ProcessEventNotifier?.Invoke(new FileValue() { Params = str, FileName = filename }, 100, 100);
            return 0;
        }
        finally
        {
            // The shared stream is released in one place, whether the calculation succeeded or not.
            reader?.Close();
            reader?.Dispose();
            reader = null;
            ClearValues();
        }
    });
}
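The per-block worker `Calculate(i)` referenced by `Parallel.For` is not part of this listing. The following is only a minimal sketch of what such a worker could look like, assuming `reader`, `partSize`, `res`, and `locker` are fields of the same class and that the value being computed is a plain byte sum:

// Hypothetical sketch only: the real Calculate(long) is not shown in the listing.
// Assumes the surrounding class already has usings for System.IO.
private void Calculate(long i)
{
    var buffer = new byte[partSize];
    int read;
    lock (locker) // a single FileStream is shared by all Parallel.For workers
    {
        reader.Seek(i * partSize, SeekOrigin.Begin);
        read = reader.Read(buffer, 0, buffer.Length);
    }

    long blockSum = 0;
    for (int b = 0; b < read; b++)
    {
        blockSum += buffer[b];
    }

    lock (locker) // accumulate into the shared total
    {
        res += blockSum;
    }
}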
internal override Task GroupAndWriteFiles(ConcurrentBag<FileValue> res, string dataFileName = null)
{
    return Task.Run(() =>
    {
        try
        {
            dataFileName = string.IsNullOrEmpty(dataFileName) ? "CountSummDataFile" : dataFileName;
            progress = 0;

            // Group the calculated values by their parent folder, skipping entries that failed.
            var dataFile = new ConcurrentDictionary<string, List<FileValue>>();
            ConcurrentBag<string> foldersList = new ConcurrentBag<string>(res.GroupBy(x => x.FolderPath).Select(s => s.Key).ToList());
            foreach (var folder in foldersList)
            {
                dataFile.GetOrAdd(folder, res.Where(w => w.FolderPath == folder && string.IsNullOrEmpty(w.Error)).ToList());
            }

            var exceptions = new ConcurrentQueue<Exception>();
            var parallelOptions = new ParallelOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };

            // Serialize one XML report per folder in parallel.
            var loop = Parallel.ForEach(dataFile, parallelOptions, (item, stp) =>
            {
                try
                {
                    if (NeedToStop)
                    {
                        stp.Stop();
                    }
                    item.Value.ForEach(itm => { itm.FileName = Path.GetFileName(itm.FilePath); });

                    XmlSerializer writer = new XmlSerializer(item.Value.GetType());
                    using (var sw = new StreamWriter(Path.Combine(item.Key, dataFileName + ".xml")))
                    {
                        writer.Serialize(sw, item.Value);
                    }

                    lock (locker)
                    {
                        progress++;
                    }
                    ProcessNotifier?.Invoke(null, progress, dataFile.Count);
                }
                catch (Exception e)
                {
                    exceptions.Enqueue(e);
                }
            });

            if (exceptions.Count > 0)
            {
                throw new AggregateException(exceptions);
            }
        }
        catch (Exception e)
        {
            throw new ReportException(e);
        }
    });
}
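The per-folder report written above can be read back with the same XmlSerializer. The sketch below is illustrative only; the `ReadReport` name is not part of the original code:

// Hypothetical reader for the report produced by GroupAndWriteFiles.
// Assumes usings for System.IO, System.Collections.Generic, System.Xml.Serialization.
private static List<FileValue> ReadReport(string folder, string dataFileName = "CountSummDataFile")
{
    var serializer = new XmlSerializer(typeof(List<FileValue>));
    using (var sr = new StreamReader(Path.Combine(folder, dataFileName + ".xml")))
    {
        return (List<FileValue>)serializer.Deserialize(sr);
    }
}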
private void ReadAndCalculateFile(string filename, ParallelLoopState stp, int blockSize = 0)
{
    try
    {
        long res = 0;
        var dt = DateTime.Now;
        var fileLength = GetLength(filename);
        using (BinaryReader reader = new BinaryReader(new FileStream(filename, FileMode.Open)))
        {
            var partSize = (blockSize == 0) ? GetBlockSize(fileLength) : blockSize;
            byte[] bytes;
            // long counter: an int would overflow on files larger than 2 GB.
            for (long i = 0; i < fileLength; i += partSize)
            {
                if (NeedToStop)
                {
                    stp?.Stop();
                    GC.Collect();
                    return;
                }
                // ReadBytes allocates its own buffer and returns a shorter array for the last block.
                bytes = reader.ReadBytes(partSize);
                res += CalculateValues(bytes);
                GC.Collect();
            }

            var addInfo = $" File: {filename} value: {res}, time: {(DateTime.Now - dt).TotalSeconds} blockSize:{partSize}";
            var fv = new FileValue { FilePath = filename, Summ = res, Params = addInfo };
            lock (locker)
            {
                fileValues.Add(fv);
                progress++;
            }
            FileCompleteNotifier?.Invoke(fv, true);
            ProcessNotifier?.Invoke(addInfo, progress, FilesCount);
        }
    }
    catch (Exception ex)
    {
        if (ex is OutOfMemoryException)
        {
            // Retry with a fixed 1 MB block if the adaptive block size was too large.
            ReadAndCalculateFile(filename, stp, 1048576);
            return;
        }
        if (ex is StopException)
        {
            return;
        }

        FileValue fv;
        var addInfo = $"File: {Path.GetFileName(filename)} - error: {ex.Message}";
        lock (locker)
        {
            fv = new FileValue() { FilePath = filename, Summ = 0, Error = ex.Message, Params = addInfo };
            fileValues.Add(fv);
            progress++; // incremented under the same lock as in the success path
        }
        FileCompleteNotifier?.Invoke(fv, false, addInfo);
        ProcessNotifier?.Invoke(addInfo, progress, FilesCount);
    }
}
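For context, here is a hedged sketch of how `ReadAndCalculateFile` and `GroupAndWriteFiles` are typically wired together. The `CalculateFolderAsync` name and the `Directory.GetFiles` enumeration are assumptions for illustration, not part of the original code:

// Hypothetical driver: assumes FilesCount, progress and fileValues are members of the same class
// and that fileValues is enumerable as IEnumerable<FileValue>.
public async Task CalculateFolderAsync(string folderPath)
{
    var files = Directory.GetFiles(folderPath, "*", SearchOption.AllDirectories);
    FilesCount = files.Length;
    progress = 0;

    // Each worker opens its own BinaryReader, so no extra synchronization is needed here.
    await Task.Run(() => Parallel.ForEach(files, (file, state) => ReadAndCalculateFile(file, state)));

    // Write one <dataFileName>.xml report per folder from the collected per-file results.
    await GroupAndWriteFiles(new ConcurrentBag<FileValue>(fileValues));
}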