/// <summary>
/// Validates this chunk's file against the supplied business rules.
/// Collects a message per failed rule (falling back to "size" / "type"
/// when the rule carries no custom message).
/// </summary>
/// <param name="rules">Size and extension constraints to enforce.</param>
/// <param name="errorMessages">Receives one message per violated rule; empty when valid.</param>
/// <returns>True when no rule was violated.</returns>
internal bool ValidateBusinessRules(FlowValidationRules rules, out List<string> errorMessages)
{
    errorMessages = new List<string>();
    var extensionChecker = new AcceptedFileExtensions();

    var exceedsMaxSize = rules.MaxFileSize.HasValue && TotalSize > rules.MaxFileSize.Value;
    if (exceedsMaxSize)
    {
        errorMessages.Add(rules.MaxFileSizeMessage ?? "size");
    }

    var extensionAllowed = extensionChecker.IsExtensionAllowed(rules.AcceptedExtensions, FileName);
    if (!extensionAllowed)
    {
        errorMessages.Add(rules.AcceptedExtensionsMessage ?? "type");
    }

    return errorMessages.Count == 0;
}
/// <summary>
/// Action filter hook: feeds the incoming request chunk to the flow.js repo.
/// When the upload is complete, binds a <see cref="FlowFile"/> action argument
/// pointing at the merged file in the temp folder; otherwise short-circuits
/// the action with Accepted (more chunks expected) or BadRequest (validation error).
/// </summary>
public override void OnActionExecuting(ActionExecutingContext filterContext)
{
    var flowJs = new FlowJsRepo();
    var request = filterContext.HttpContext.Request;

    var validationRules = new FlowValidationRules();
    validationRules.AcceptedExtensions.AddRange(Extensions);
    validationRules.MaxFileSize = Size;

    var status = flowJs.PostChunk(request, Path.GetTempPath(), validationRules);

    if (status.Status == PostChunkStatus.Done)
    {
        var parameterDescriptor = filterContext.ActionDescriptor
            .Parameters
            .FirstOrDefault(x => x.ParameterType == typeof(FlowFile));

        // BUG FIX: FirstOrDefault can return null when the decorated action has
        // no FlowFile parameter; the original dereferenced it unconditionally
        // and threw a NullReferenceException. Only bind the argument when the
        // parameter actually exists.
        if (parameterDescriptor != null)
        {
            filterContext.ActionArguments[parameterDescriptor.Name] = new FlowFile
            {
                flowFilename = status.FileName,
                path = Path.Combine(Path.GetTempPath(), status.FileName)
            };
        }
        return;
    }

    if (status.Status == PostChunkStatus.Error)
    {
        //TODO: Figure out how we can return the flow errors to the client
        filterContext.Result = new BadRequestResult();
    }
    else
    {
        filterContext.Result = new AcceptedResult();
    }
    base.OnActionExecuting(filterContext);
}
/// <summary>
/// Core flow.js chunk handler: parses the chunk form, validates it, writes the
/// chunk file to <paramref name="folder"/>, and — once every chunk of the file
/// is present — merges them into the final file under a per-identifier lock.
/// </summary>
/// <param name="request">Incoming multipart request carrying the flow.js form fields and the chunk file.</param>
/// <param name="folder">Destination folder for chunk files and the merged result.</param>
/// <param name="validationRules">Optional size/extension rules; null skips validation.</param>
/// <returns>Response whose Status is Error, PartlyDone, or Done.</returns>
private FlowJsPostChunkResponse PostChunkBase(HttpRequest request, string folder, FlowValidationRules validationRules)
{
    Console.WriteLine($"Request Content-Length={request.ContentLength}");
    //var body = new StreamReader(request.Body).ReadToEnd();
    //Console.WriteLine($"Request Body={body}");

    var chunk = new FlowChunk();
    var requestIsSane = chunk.ParseForm(request.Form);
    if (!requestIsSane)
    {
        Console.WriteLine("Experienced an error in the submitted form - form damaged?");
        var errResponse = new FlowJsPostChunkResponse { Status = PostChunkStatus.Error };
        errResponse.ErrorMessages.Add("damaged");
        // BUG FIX: the original built this error response but never returned it,
        // so a damaged form fell through into normal processing.
        return errResponse;
    }

    // BUG FIX: guard the Files[0] access — a request without an attached file
    // would previously throw instead of reporting a clean error.
    if (request.Form.Files.Count == 0)
    {
        Console.WriteLine("Experienced an error in the submitted form - no file attached");
        var errResponse = new FlowJsPostChunkResponse { Status = PostChunkStatus.Error };
        errResponse.ErrorMessages.Add("damaged");
        return errResponse;
    }

    List<string> errorMessages = null;
    var file = request.Form.Files[0];

    var response = new FlowJsPostChunkResponse
    {
        FileName = chunk.FileName,
        Size = chunk.TotalSize
    };

    var chunkIsValid = true;
    Console.WriteLine("Processing validation rules");
    if (validationRules != null)
    {
        chunkIsValid = chunk.ValidateBusinessRules(validationRules, out errorMessages);
    }

    if (!chunkIsValid)
    {
        // errorMessages is guaranteed non-null here: chunkIsValid only becomes
        // false via ValidateBusinessRules, which assigns the out parameter.
        Console.WriteLine($"Experienced an error while validating rules {string.Join(" ", errorMessages)}");
        response.Status = PostChunkStatus.Error;
        response.ErrorMessages = errorMessages;
        return response;
    }

    var chunkFullPathName = GetChunkFilename(chunk.Number, chunk.Identifier, folder);
    try
    {
        // create folder if it does not exist
        Console.WriteLine($"Opening or creating folder {folder}");
        if (!Directory.Exists(folder))
        {
            Directory.CreateDirectory(folder);
        }

        // save file
        using (var chunkFile = File.Create(chunkFullPathName))
        {
            Console.WriteLine($"Saving chunk file {chunkFullPathName} of length {file.Length}");
            file.CopyTo(chunkFile);
        }
    }
    catch (Exception)
    {
        Console.WriteLine("Error saving chunk");
        throw;
    }

    // see if we have more chunks to upload. If so, return here
    for (int i = 1, l = chunk.TotalChunks; i <= l; i++)
    {
        var chunkNameToTest = GetChunkFilename(i, chunk.Identifier, folder);
        Console.WriteLine($"Checking if chunk exists already {chunkNameToTest}");
        var exists = File.Exists(chunkNameToTest);
        if (!exists)
        {
            Console.WriteLine("Some chunks are missing. Sending PartlyDone response");
            response.Status = PostChunkStatus.PartlyDone;
            return response;
        }
    }

    // Due to timing issues, we may have all chunks uploaded state for all chunks, causing the file to be merged up to n times, where n is the number of chunks
    // To resolve this, we will have a global lock on a filename lock dict, and then lock the filename lock
    // Allowing multiple files to be uploaded at once, with one global lock to set the filename state
    lock (dictLock)
    {
        if (!fileLocks.ContainsKey(chunk.Identifier))
        {
            Console.WriteLine($"Created a lock for Identifier {chunk.Identifier} TID: {Thread.CurrentThread.ManagedThreadId}");
            fileLocks[chunk.Identifier] = new object();
        }
    }

    var localLock = fileLocks[chunk.Identifier];

    if (Monitor.TryEnter(localLock))
    {
        try
        {
            Console.WriteLine($"Claimed a lock for Identifier {chunk.Identifier} TID: {Thread.CurrentThread.ManagedThreadId}");
            // if we are here, all chunks are uploaded
            var fileArray = new List<string>();
            Console.WriteLine("All chunks done. the full list of chunks is:");
            for (int i = 1, l = chunk.TotalChunks; i <= l; i++)
            {
                Console.WriteLine("flow-" + chunk.Identifier + "." + i);
                fileArray.Add("flow-" + chunk.Identifier + "." + i);
            }

            MultipleFilesToSingleFile(folder, fileArray, chunk.FileName);

            Console.WriteLine("Deleting old chunks");
            for (int i = 0, l = fileArray.Count; i < l; i++)
            {
                try
                {
                    Console.WriteLine($"Deleting {fileArray[i]}");
                    File.Delete(Path.Combine(folder, fileArray[i]));
                }
                catch (Exception)
                {
                    // Best-effort cleanup: a leftover chunk file is harmless,
                    // the merged file has already been produced.
                    Console.WriteLine("Error deleting chunk file");
                }
            }

            response.Status = PostChunkStatus.Done;
            return response;
        }
        finally
        {
            // We can remove the lock here, as everyone else will have returned
            Console.WriteLine($"Released lock for Identifier {chunk.Identifier}");
            Monitor.Exit(localLock);
            // Don't need to lock here
            fileLocks.Remove(chunk.Identifier);
        }
    }

    // The file has already been locked, so we don't need to do anything as it's currently being merged, and that request will return 200
    Console.WriteLine($"All chunks completed, but a lock has already been claimed for Identifier {chunk.Identifier} TID: {Thread.CurrentThread.ManagedThreadId}");
    response.Status = PostChunkStatus.PartlyDone;
    return response;
}
/// <summary>
/// Public entry point for posting one flow.js chunk; delegates to
/// <see cref="PostChunkBase"/>. Validation is skipped when
/// <paramref name="validationRules"/> is null.
/// </summary>
public FlowJsPostChunkResponse PostChunk(HttpRequest request, string folder, FlowValidationRules validationRules = null)
{
    var response = PostChunkBase(request, folder, validationRules);
    return response;
}