/// <summary>
/// K-way merges the pre-sorted chunk files ("sorted*.dat") in <paramref name="dataPath"/>
/// into a single output file "BigFileSorted.txt" in the same directory.
/// Duplicate values occurring across chunks are detected in one parallel pass and
/// written out the correct number of times.
/// </summary>
/// <param name="dataPath">Directory containing the sorted chunk files; also receives the output file.</param>
/// <param name="comparer">Comparer defining the merge order of chunk lines.</param>
/// <param name="records">Total record count, used only for progress reporting.</param>
static void MergeChunks(string dataPath, IComparer<string> comparer, long records)
{
    var paths = Directory.GetFiles(dataPath, "sorted*.dat");
    var chunks = paths.Length;
    if (chunks == 0)
    {
        return;
    }

    var readers = new DataReader[chunks];
    try
    {
        for (int i = 0; i < chunks; i++)
        {
            readers[i] = new DataReader(paths[i]);
        }

        var progress = 0L;
        // BUG FIX: records / 10000 is 0 when records < 10000, which made the
        // "% progressBlock" below throw DivideByZeroException. Clamp to >= 1.
        var progressBlock = Math.Max(1L, records / 10000);

        using (var sw = new StreamWriter(Path.Combine(dataPath, "BigFileSorted.txt")))
        {
            while (true)
            {
                // Scan all non-exhausted readers for the smallest current value.
                var lowestIndex = -1;
                var lowestValue = string.Empty;
                DataReader lowestReader = null;
                for (var j = 0; j < chunks; j++)
                {
                    var reader = readers[j];
                    if (!reader.IsEnd)
                    {
                        var current = reader.Current;
                        if (lowestIndex < 0 || comparer.Compare(current, lowestValue) < 0)
                        {
                            lowestIndex = j;
                            lowestValue = current;
                            lowestReader = reader;
                        }
                    }
                }

                if (lowestIndex == -1)
                {
                    break; // every reader is exhausted — merge complete
                }

                // Advance each reader past all copies of the lowest value and
                // count them. Each lambda touches only its own reader, so the
                // parallel pass has no shared mutable state.
                var cnt = readers.AsParallel().Select(reader =>
                {
                    var res = 0;
                    if (reader == lowestReader)
                    {
                        // The winning reader's current value is the lowest itself.
                        res++;
                        reader.Next();
                    }
                    while (!reader.IsEnd && comparer.Compare(reader.Current, lowestValue) == 0)
                    {
                        res++;
                        reader.Next();
                    }
                    return res;
                }).Sum();

                if (cnt > 0)
                {
                    // Chunk lines were stored with the parts swapped around the
                    // last '.' for sorting; swap them back for the output file.
                    // NOTE(review): assumes every line contains at least one '.'
                    // — verify against the chunk-writing code.
                    var indx = lowestValue.LastIndexOf('.');
                    var outputLine = string.Format(
                        "{0}.{1}",
                        lowestValue.Substring(indx + 1).TrimStart(),
                        lowestValue.Substring(0, indx));

                    for (var j = 0; j < cnt; j++)
                    {
                        sw.WriteLine(outputLine);
                        if (++progress % progressBlock == 0)
                        {
                            Console.Write("{0:f2}% \r", 100.0 * progress / records);
                        }
                    }
                }
            }
        }
    }
    finally
    {
        // BUG FIX: if a DataReader constructor threw partway through the fill
        // loop, trailing slots are null — the unconditional Close() here used
        // to throw NullReferenceException and mask the original exception.
        for (int i = 0; i < chunks; i++)
        {
            readers[i]?.Close();
        }
    }
}