/// <summary>
/// Merges all parts of a split file back into the original file.
/// Naming convention: original name + ".part_N.X" (N = file part number, X = total parts).
/// Enumerates the folder for all matching parts; if every part is present and the base
/// file is not already being merged by another caller, concatenates them in order.
/// </summary>
/// <param name="FileName">Full path of any one part file; must contain PART_TOKEN.</param>
/// <returns>true when all parts were found and merged successfully; false otherwise.</returns>
public static bool MergeFile(string FileName)
{
    var baseFileName = FileName.Substring(0, FileName.IndexOf(PART_TOKEN));
    var trailingTokens = FileName.Substring(FileName.IndexOf(PART_TOKEN) + PART_TOKEN.Length);
    var searchPattern = String.Format("{0}{1}*", Path.GetFileName(baseFileName), PART_TOKEN);
    var filesList = Directory.GetFiles(Path.GetDirectoryName(FileName), searchPattern);

    // Expected total part count is the text after the last '.'.
    // Fix: the original ignored the TryParse result; a malformed name now bails out
    // instead of comparing against a garbage fileCount of 0.
    if (!long.TryParse(trailingTokens.Substring(trailingTokens.IndexOf(".") + 1), out long fileCount))
    {
        return false;
    }

    // Only proceed once every chunk has arrived and no other merge owns this base file.
    if (filesList.Length != fileCount || FileMergeManager.Instance.InUse(baseFileName))
    {
        return false;
    }

    FileMergeManager.Instance.AddFile(baseFileName);
    try
    {
        if (File.Exists(baseFileName))
        {
            File.Delete(baseFileName);
        }

        // Collect (path, part number) pairs so the chunks can be rebuilt in sequence.
        var mergeList = new List<SortedFile>();
        foreach (string file in filesList)
        {
            var tail = file.Substring(file.IndexOf(PART_TOKEN) + PART_TOKEN.Length);
            if (!long.TryParse(tail.Substring(0, tail.IndexOf(".")), out long fileIndex))
            {
                return false; // corrupt part name — abort rather than merge out of order
            }
            mergeList.Add(new SortedFile() { FileName = file, FileOrder = fileIndex });
        }

        try
        {
            using (var FS = new FileStream(baseFileName, FileMode.Create))
            {
                foreach (var chunk in mergeList.OrderBy(s => s.FileOrder))
                {
                    using (var fileChunk = new FileStream(chunk.FileName, FileMode.Open))
                    {
                        fileChunk.CopyTo(FS);
                    }
                    // Fix: delete the chunk only after a successful copy. The original
                    // swallowed copy exceptions and deleted the chunk anyway, silently
                    // producing a corrupt merged file with no way to retry.
                    File.Delete(chunk.FileName);
                }
            }
        }
        catch (IOException)
        {
            // Remove the partial output and report failure so the caller can retry.
            if (File.Exists(baseFileName))
            {
                File.Delete(baseFileName);
            }
            return false;
        }

        return true;
    }
    finally
    {
        // Fix: always release the singleton lock — the original leaked it whenever
        // any exception escaped, permanently blocking future merges of this file.
        FileMergeManager.Instance.RemoveFile(baseFileName);
    }
}
/// <summary>
/// Reassembles a split file from its ".part_N.X" chunks and returns the path of the
/// rebuilt base file. Validates the part-name format and the expected part count
/// before writing, and throws rather than merging an incomplete set.
/// </summary>
/// <param name="part1Path">Full path of any one part file; must contain PART_TOKEN.</param>
/// <returns>The path of the merged base file.</returns>
/// <exception cref="FileFormatException">A part name does not follow the convention.</exception>
/// <exception cref="InvalidOperationException">The number of parts on disk differs from the expected count.</exception>
public static string Merge(string part1Path)
{
    var markerPos = part1Path.IndexOf(PART_TOKEN);
    var baseFName = part1Path.Substring(0, markerPos);
    var tailTokens = part1Path.Substring(markerPos + PART_TOKEN.Length);

    // Enumerate sibling parts before validating the name, matching the
    // original side-effect order (a missing directory throws here first).
    var pattern = Path.GetFileName(baseFName) + PART_TOKEN + "*";
    var matches = Directory.GetFiles(Path.GetDirectoryName(part1Path), pattern)
        .OrderBy(x => x)
        .ToList();

    var mergeList = new List<SortedFile>();
    var dot = tailTokens.IndexOf(".");
    int fileIndex;
    int fileCount;
    var parsed = int.TryParse(tailTokens.Substring(0, dot), out fileIndex)
        && int.TryParse(tailTokens.Substring(dot + 1), out fileCount);
    if (!parsed)
    {
        throw new FileFormatException("Invalid file part name: " + part1Path);
    }
    if (matches.Count != fileCount)
    {
        throw new InvalidOperationException("Unexpected # of file parts");
    }

    // A stale copy of the base file would otherwise survive a partial rebuild.
    if (File.Exists(baseFName))
    {
        File.Delete(baseFName);
    }

    // Attach each part's sequence number so the chunks merge in the right order.
    foreach (string filePart in matches)
    {
        var part = new SortedFile(filePart);
        baseFName = filePart.Substring(0, filePart.IndexOf(PART_TOKEN));
        var partTail = filePart.Substring(filePart.IndexOf(PART_TOKEN) + PART_TOKEN.Length);
        if (!int.TryParse(partTail.Substring(0, partTail.IndexOf(".")), out fileIndex))
        {
            throw new FileFormatException("Invalid file part name: " + filePart);
        }
        part.FileOrder = fileIndex;
        mergeList.Add(part);
    }

    var orderedParts = mergeList
        .OrderBy(s => s.FileOrder)
        .Select(x => x.FileName)
        .ToList();
    WriteOneBigFile(baseFName, orderedParts);
    return baseFName;
}
/// <summary>
/// Sorts the chunks into the correct order before they are concatenated.
/// (Original comment was Swedish: "sorterar chunks så vi har dem i rätt ordning
/// innan vi klistrar ihop".)
/// </summary>
/// <param name="filesList">Paths of the chunk files, each named "base" + partToken + "N.X".</param>
/// <returns>The chunks as SortedFile entries ordered by part number.</returns>
/// <exception cref="FileFormatException">A chunk name's part number cannot be parsed.</exception>
private List<SortedFile> SortMergeList(string[] filesList)
{
    List<SortedFile> mergeList = new List<SortedFile>();
    foreach (var file in filesList)
    {
        // NOTE(review): trailingToken and fileIndex appear to be instance fields
        // (declared elsewhere in this class); they are still assigned here to
        // preserve any state other members may observe — confirm whether they
        // can be made locals.
        trailingToken = file.Substring(file.IndexOf(partToken) + partToken.Length);
        if (!int.TryParse(trailingToken.Substring(0, trailingToken.IndexOf(".")), out fileIndex))
        {
            // Fix: the original ignored a failed parse, silently giving a chunk
            // with a corrupt name order 0 and scrambling the merge sequence.
            throw new FileFormatException("Invalid file part name: " + file);
        }
        SortedFile sFile = new SortedFile();
        sFile.FileName = file;
        sFile.FileOrder = fileIndex;
        mergeList.Add(sFile);
    }
    return mergeList.OrderBy(s => s.FileOrder).ToList();
}
/// <summary>
/// Merges all ".part_N.X" chunks of a split file back into one contiguous file.
/// </summary>
/// <param name="FileName">Full path of any one chunk; the name must follow the
/// "base.part_N.X" convention (N = chunk number, X = total chunks).</param>
/// <returns>true when every chunk was present and merged successfully; false otherwise.</returns>
private bool MergeFile(string FileName)
{
    // Parse out the different tokens from the filename according to the convention.
    string partToken = ".part_";
    string baseFileName = FileName.Substring(0, FileName.IndexOf(partToken));
    string trailingTokens = FileName.Substring(FileName.IndexOf(partToken) + partToken.Length);

    // Fix: the original ignored the TryParse result; a malformed name now bails out
    // instead of comparing the folder contents against a garbage fileCount of 0.
    int fileCount;
    if (!int.TryParse(trailingTokens.Substring(trailingTokens.IndexOf(".") + 1), out fileCount))
    {
        return false;
    }

    // Get a list of all file parts in the temp folder.
    string searchpattern = Path.GetFileName(baseFileName) + partToken + "*";
    string[] filesList = Directory.GetFiles(Path.GetDirectoryName(FileName), searchpattern);

    // Only proceed if we have received all the file chunks.
    // NOTE(review): the original had a commented-out MergeFileManager singleton
    // guard here; without it, concurrent callers can race on the same base file —
    // confirm whether the guard should be restored.
    if (filesList.Length != fileCount)
    {
        return false;
    }

    // A stale copy of the target file must not survive into the rebuild.
    if (System.IO.File.Exists(baseFileName))
    {
        System.IO.File.Delete(baseFileName);
    }

    // Add each file located to a list so we can get them into
    // the correct order for rebuilding the file.
    List<SortedFile> mergeList = new List<SortedFile>();
    foreach (string file in filesList)
    {
        string tail = file.Substring(file.IndexOf(partToken) + partToken.Length);
        int fileIndex;
        if (!int.TryParse(tail.Substring(0, tail.IndexOf(".")), out fileIndex))
        {
            return false; // corrupt chunk name would merge out of order
        }
        SortedFile sFile = new SortedFile();
        sFile.FileName = file;
        sFile.FileOrder = fileIndex;
        mergeList.Add(sFile);
    }

    // Sort by the file-part number to ensure we merge back in the correct order.
    var mergeOrder = mergeList.OrderBy(s => s.FileOrder).ToList();
    try
    {
        using (FileStream FS = new FileStream(baseFileName, FileMode.Create))
        {
            // Merge each file chunk back into one contiguous file stream.
            foreach (var chunk in mergeOrder)
            {
                using (FileStream fileChunk = new FileStream(chunk.FileName, FileMode.Open))
                {
                    fileChunk.CopyTo(FS);
                }
                // Delete the chunk only after its bytes are safely in the output.
                System.IO.File.Delete(chunk.FileName);
            }
        }
    }
    catch (IOException)
    {
        // Fix: the original swallowed the exception and still returned true,
        // leaving a silently truncated merged file on disk. Remove the partial
        // output and report failure so the caller can retry.
        if (System.IO.File.Exists(baseFileName))
        {
            System.IO.File.Delete(baseFileName);
        }
        return false;
    }

    return true;
}
/// <summary>
/// Rebuilds an uploaded file from chunks held in the in-memory cache and writes it
/// under App_Data\uploads. Chunk cache keys follow the "base.part_N.X" convention.
/// The merge runs only when the reported chunk is the final one (N == X), which
/// assumes the last chunk is processed after all others — TODO confirm against the
/// upload protocol.
/// </summary>
/// <param name="FileName">Name of the chunk just received, in "base.part_N.X" form.</param>
/// <returns>true when every cached chunk was written to disk; false otherwise.</returns>
/// <exception cref="FileNotFoundException">An expected chunk is missing from the cache.</exception>
private bool MergeCachedFile(string FileName)
{
    // Parse out the different tokens from the filename according to the convention.
    string partToken = ".part_";
    string baseFileName = FileName.Substring(0, FileName.IndexOf(partToken));
    string trailingTokens = FileName.Substring(FileName.IndexOf(partToken) + partToken.Length);
    int fileIndex = 0;
    int fileCount = 0;
    int.TryParse(trailingTokens.Substring(0, trailingTokens.IndexOf(".")), out fileIndex);
    int.TryParse(trailingTokens.Substring(trailingTokens.IndexOf(".") + 1), out fileCount);

    // Only the final chunk (index == count) triggers the merge.
    if (fileIndex != fileCount)
    {
        return false;
    }

    // Gather every cached chunk, in order, before touching the disk so a missing
    // chunk is detected up front rather than mid-write.
    List<SortedFile> mergeList = new List<SortedFile>();
    for (int i = 1; i <= fileCount; i++)
    {
        string chunkName = $"{baseFileName}{partToken}{i}.{fileCount}";
        byte[] chunk;
        if (!_cache.TryGetValue(chunkName, out chunk))
        {
            // Couldn't find the cached chunk; include its name for diagnostics.
            throw new FileNotFoundException(chunkName);
        }
        mergeList.Add(new SortedFile() { FileName = chunkName, FileOrder = i, Bytes = chunk });
    }

    // SECURITY fix: FileName comes from an upload. The original concatenated it
    // straight into the path, allowing traversal (e.g. "..\\..\\web.config.part_1.1").
    // Strip directory components and build the path with Path.Combine.
    string uploadPath = Path.Combine("App_Data", "uploads", Path.GetFileName(baseFileName));
    try
    {
        using (FileStream stream = new FileStream(uploadPath, FileMode.Create))
        {
            // Merge each cached chunk back into one contiguous file stream.
            foreach (var chunk in mergeList)
            {
                stream.Write(chunk.Bytes, 0, chunk.Bytes.Length);
                // Evict the chunk only after its bytes are safely in the output.
                _cache.Remove(chunk.FileName);
            }
        }
    }
    catch (IOException)
    {
        // Fix: the original swallowed the exception and still returned true,
        // leaving a truncated file on disk. Remove the partial output and report
        // failure; unevicted chunks remain cached for a retry.
        if (File.Exists(uploadPath))
        {
            File.Delete(uploadPath);
        }
        return false;
    }

    return true;
}