/// <summary>
/// Evaluate the population and select the specified proportion with the best
/// fitness scores.
/// </summary>
/// <param name="population">The population to select from.</param>
/// <param name="proportion">The proportion of the population to select, from 0 to 1.</param>
/// <returns>The selected phenotypes, best first according to the optimisation objective.</returns>
public virtual IList<TPhenotype> Select(IList<TPhenotype> population, double proportion)
{
    // Sort candidates by fitness (ascending).  AddSafe tolerates duplicate keys.
    var sorted = new SortedList<double, TPhenotype>();
    foreach (var option in population)
    {
        double fitness = OverallFitness(option);
        // NaN never compares equal to anything (including itself), so the original
        // test 'fitness != double.NaN' was always true; use double.IsNaN instead
        // so that un-scoreable candidates are genuinely excluded.
        if (!double.IsNaN(fitness))
        {
            sorted.AddSafe(fitness, option);
        }
    }
    int target = (int)Math.Ceiling(population.Count * proportion);
    // Cannot select more than were successfully scored:
    target = Math.Min(target, sorted.Count);
    var result = new List<TPhenotype>(target);
    for (int i = 0; i < target; i++)
    {
        if (Settings.Objective == OptimisationObjective.Maximise)
        {
            // Get maximum fitness: take from the top of the ascending-sorted list
            result.Add(sorted.Values[sorted.Count - 1 - i]);
        }
        else
        {
            // Get minimum fitness: take the i-th smallest.  (Previously this
            // always re-added sorted.Values[0], returning the same single best
            // candidate 'target' times instead of the 'target' best candidates.)
            result.Add(sorted.Values[i]);
        }
    }
    return result;
}
/// <summary>
/// Construct a sorted list using this list as the items and another list of related items in the same
/// order as the keys. This override for doubles uses the AddSafe extension method to enable duplicate
/// keys to be dealt with without throwing an exception.
/// </summary>
/// <typeparam name="TValue">The type of the items to be sorted.</typeparam>
/// <param name="items">The items to become the values of the sorted list.</param>
/// <param name="keys">The keys to sort by, in the same order as the items.</param>
/// <returns>A new SortedList of the items keyed by the corresponding entries in
/// <paramref name="keys"/>.  If the two lists differ in length, only the first
/// min(items.Count, keys.Count) pairs are included.</returns>
public static SortedList<double, TValue> SortedBy<TValue>(this IList<TValue> items, IList<double> keys)
{
    var result = new SortedList<double, TValue>(items.Count);
    // Guard against mismatched list lengths by iterating only over the shared range.
    // (Previously the loop ran to items.Count, which threw an
    // ArgumentOutOfRangeException whenever keys was shorter than items —
    // the computed 'count' was never actually used.)
    int count = Math.Min(items.Count, keys.Count);
    for (int i = 0; i < count; i++)
    {
        result.AddSafe(keys[i], items[i]);
    }
    return result;
}
/// <summary>
/// Helper function to extract all properties from the given type with the specified type of
/// ActionParameter annotation.
/// </summary>
/// <param name="action">The action whose runtime type is inspected for annotated properties.</param>
/// <param name="parameterType">The subtype of ActionParameter annotation to be searched for</param>
/// <returns>The annotated properties, ordered by their ActionParameterAttribute Order value.</returns>
public static IList<PropertyInfo> ExtractActionParameters(this IAction action, Type parameterType)
{
    Type type = action.GetType();
    // SortedList keyed on the attribute's Order so the result comes back sorted;
    // AddSafe tolerates duplicate Order values.
    SortedList<double, PropertyInfo> result = new SortedList<double, PropertyInfo>();
    PropertyInfo[] pInfos = type.GetProperties();
    foreach (PropertyInfo pInfo in pInfos)
    {
        // 'true' searches the inheritance chain for the attribute as well.
        object[] attributes = pInfo.GetCustomAttributes(parameterType, true);
        // Use the array's Length property rather than the LINQ Count() extension —
        // same result, no enumerator allocation.
        if (attributes.Length > 0)
        {
            ActionParameterAttribute aInput = (ActionParameterAttribute)attributes[0];
            result.AddSafe(aInput.Order, pInfo);
        }
    }
    return result.Values.ToList();
}
/// <summary>If this request is a POST/PUT/PATCH request, replaces the body of the request with data from the specified stream.
/// This will clear and reinitialize all the parameter values and file uploads.</summary>
/// <param name="body">Stream to read new request body from.</param>
/// <param name="tempPath">The temporary directory to use for file uploads. Default is <see cref="Path.GetTempPath"/>.</param>
/// <param name="storeFileUploadInFileAtSize">The maximum size (in bytes) at which file uploads are stored in memory.
/// Any uploads that exceed this limit are written to temporary files on disk. Default is 16 MB.</param>
internal void ParsePostBody(Stream body, string tempPath = null, long storeFileUploadInFileAtSize = 16 * 1024 * 1024)
{
    _fileUploads.Clear();
    _postFields.Clear();
    // NOTE(review): the XML doc mentions POST/PUT/PATCH, but only POST is accepted here — confirm intended.
    if (Method != HttpMethod.Post)
    {
        return;
    }
    // Simple case: url-encoded body is parsed in one pass by the query-string parser.
    if (Headers.ContentType == HttpPostContentType.ApplicationXWwwFormUrlEncoded)
    {
        using (var reader = new StreamReader(body, Encoding.UTF8))
            _postFields = HttpHelper.ParseQueryValueParameters(reader).ToNameValuesCollection();
        return;
    }
    // From here on: multipart/form-data, parsed as a streaming state machine.
    // An excessively long boundary is going to screw up the following algorithm.
    // (Actually a limit of up to bufferSize - 8 would work, but I think 1024 is more than enough.)
    if (body == null || Headers.ContentMultipartBoundary == null || Headers.ContentMultipartBoundary.Length > 1024)
    {
        return;
    }
    if (tempPath == null)
    {
        tempPath = Path.GetTempPath();
    }
    // Instead of reallocating a new buffer multiple times, allocate at most two buffers and switch between them as necessary
    int bufferSize = 65536;
    byte[] buffer1 = new byte[bufferSize];
    byte[] buffer2 = null;
    byte[] buffer = buffer1;
    // Copies the unconsumed tail of the current buffer to the front of the other buffer
    // and makes that other buffer current.
    void switchBuffer(int offset, int count)
    {
        if (buffer == buffer1)
        {
            if (buffer2 == null)
            {
                buffer2 = new byte[bufferSize];
            }
            Buffer.BlockCopy(buffer, offset, buffer2, 0, count);
            buffer = buffer2;
        }
        else
        {
            Buffer.BlockCopy(buffer, offset, buffer1, 0, count);
            buffer = buffer1;
        }
    }
    // Process request body
    int bytesRead = body.Read(buffer, 0, bufferSize);
    if (bytesRead == 0) // premature end of request body
    {
        return;
    }
    // We expect the input to begin with "--" followed by the boundary followed by "\r\n"
    // It is, however, allowed to have CRLFs before the first "--"
    var crlfs = 0;
    while (crlfs + 1 < bytesRead && buffer[crlfs] == '\r' && buffer[crlfs + 1] == '\n')
    {
        crlfs += 2;
    }
    byte[] expecting = ("--" + Headers.ContentMultipartBoundary + "\r\n").ToUtf8();
    int bufferIndex = bytesRead;
    // Keep reading until we have enough bytes to check the opening boundary marker.
    while (bufferIndex < buffer.Length && bufferIndex < expecting.Length + crlfs)
    {
        bytesRead = body.Read(buffer, bufferIndex, buffer.Length - bufferIndex);
        if (bytesRead == 0) // premature end of request body
        {
            return;
        }
        bufferIndex += bytesRead;
        while (crlfs + 1 < bufferIndex && buffer[crlfs] == '\r' && buffer[crlfs + 1] == '\n')
        {
            crlfs += 2;
        }
        if (expecting.Length + crlfs > buffer.Length) // Sanity check in case the client tries to fill the buffer with just CRLFs
        {
            return;
        }
    }
    if (!buffer.SubarrayEquals(crlfs, expecting, 0, expecting.Length))
    {
        return;
    }
    // Consume the opening boundary: bytesRead = unconsumed bytes, bufferIndex = read position.
    bytesRead = bufferIndex - expecting.Length - crlfs;
    bufferIndex = expecting.Length + crlfs;
    // Now comes the main reading loop
    bool processingHeaders = true;     // state flag: parsing part headers vs part content
    string currentHeaders = "";
    string currentFieldName = null;
    Stream currentWritingStream = null;  // destination for the current part's content (memory or temp file)
    bool currentIsFileUpload = false;
    string currentFileUploadFilename = null;
    string currentFileUploadContentType = null;
    string currentFileUploadTempFilename = null;
    Decoder utf8Decoder = Encoding.UTF8.GetDecoder();  // stateful: headers are decoded byte-by-byte
    char[] chArr = new char[1];
    byte[] lastBoundary = ("\r\n--" + Headers.ContentMultipartBoundary + "--\r\n").ToUtf8();
    byte[] middleBoundary = ("\r\n--" + Headers.ContentMultipartBoundary + "\r\n").ToUtf8();
    // In-memory uploads sorted by size, so the largest can be spilled to disk first when over the limit.
    var inMemoryFileUploads = new SortedList<long, List<FileUpload>>();
    long inMemoryFileUploadsTotal = 0;
    while (bufferIndex > 0 || bytesRead > 0)
    {
        int writeIndex = 0;
        if (bytesRead > 0)
        {
            if (processingHeaders)
            {
                // Accumulate header characters one at a time until the blank line ("\r\n\r\n").
                bool newLineFound = false;
                while (!newLineFound && bytesRead > 0)
                {
                    int numCh = utf8Decoder.GetChars(buffer, bufferIndex, 1, chArr, 0);
                    bufferIndex++;
                    bytesRead--;
                    if (numCh != 0)
                    {
                        currentHeaders += chArr[0];
                    }
                    newLineFound = currentHeaders.EndsWith("\r\n\r\n");
                }
                if (newLineFound)
                {
                    // Reset per-part state, then parse Content-Disposition / Content-Type.
                    currentIsFileUpload = false;
                    currentFileUploadContentType = null;
                    currentFileUploadFilename = null;
                    currentFileUploadTempFilename = null;
                    currentFieldName = null;
                    currentWritingStream = null;
                    foreach (string header in currentHeaders.Split(new string[] { "\r\n" }, StringSplitOptions.None))
                    {
                        Match m;
                        if ((m = Regex.Match(header, @"^content-disposition\s*:\s*(?:form-data|file)\s*;(.*)$", RegexOptions.IgnoreCase)).Success)
                        {
                            // Walk the "key=value" / "key=\"value\"" parameters one at a time.
                            string v = m.Groups[1].Value;
                            while (v.Length > 0)
                            {
                                m = Regex.Match(v, @"^\s*(\w+)=""([^""]*)""\s*(?:;\s*|$)");
                                if (!m.Success)
                                {
                                    m = Regex.Match(v, @"^\s*(\w+)=([^;]*)\s*(?:;\s*|$)");
                                }
                                if (!m.Success)
                                {
                                    break;
                                }
                                if (m.Groups[1].Value.ToLowerInvariant() == "name")
                                {
                                    currentFieldName = m.Groups[2].Value;
                                }
                                else if (m.Groups[1].Value.ToLowerInvariant() == "filename")
                                {
                                    currentFileUploadFilename = m.Groups[2].Value;
                                }
                                v = v.Substring(m.Length);
                            }
                        }
                        else if ((m = Regex.Match(header, @"^content-type\s*:\s*(.*)$", RegexOptions.IgnoreCase)).Success)
                        {
                            currentFileUploadContentType = m.Groups[1].Value;
                        }
                    }
                    // Only parts with a field name are captured; a filename marks it as a file upload.
                    if (currentFieldName != null)
                    {
                        currentWritingStream = new MemoryStream();
                        if (currentFileUploadFilename != null)
                        {
                            currentIsFileUpload = true;
                        }
                    }
                    processingHeaders = false;
                    continue;
                }
            }
            else if (bytesRead >= lastBoundary.Length) // processing content
            {
                // Search for the terminating boundary and the inter-part boundary; prefer whichever comes first.
                bool boundaryFound = false;
                bool end = false;
                int boundaryIndex = buffer.IndexOfSubarray(lastBoundary, bufferIndex, bytesRead);
                if (boundaryIndex != -1)
                {
                    boundaryFound = true;
                    end = true;
                }
                int middleBoundaryIndex = buffer.IndexOfSubarray(middleBoundary, bufferIndex, bytesRead);
                if (middleBoundaryIndex != -1 && (!boundaryFound || middleBoundaryIndex < boundaryIndex))
                {
                    boundaryFound = true;
                    boundaryIndex = middleBoundaryIndex;
                    end = false;
                }
                int howMuchToWrite = boundaryFound
                    // If we have encountered the boundary, write all the data up to it
                    ? boundaryIndex - bufferIndex
                    // Write as much of the data to the output stream as possible, but leave enough so that we can still recognise the boundary
                    : bytesRead - lastBoundary.Length; // this is never negative because of the "if" we're in
                // Write the aforementioned amount of data to the output stream
                if (howMuchToWrite > 0 && currentWritingStream != null)
                {
                    // If we're currently processing a file upload in memory, and it takes the total file uploads over the limit...
                    if (currentIsFileUpload && currentWritingStream is MemoryStream && ((MemoryStream)currentWritingStream).Length + inMemoryFileUploadsTotal + howMuchToWrite > storeFileUploadInFileAtSize)
                    {
                        var memory = (MemoryStream)currentWritingStream;
                        var inMemoryKeys = inMemoryFileUploads.Keys;
                        if (inMemoryKeys.Count > 0 && memory.Length < inMemoryKeys[inMemoryKeys.Count - 1])
                        {
                            // ... switch the largest one to a temporary file
                            var lastKey = inMemoryKeys[inMemoryKeys.Count - 1];
                            var biggestUpload = inMemoryFileUploads[lastKey][0];
                            inMemoryFileUploads[lastKey].RemoveAt(0);
                            biggestUpload.LocalFilename = HttpInternalObjects.RandomTempFilepath(tempPath, out var fileStream);
                            fileStream.Write(biggestUpload.Data, 0, biggestUpload.Data.Length);
                            fileStream.Close();
                            fileStream.Dispose();
                            inMemoryFileUploadsTotal -= biggestUpload.Data.LongLength;
                            biggestUpload.Data = null;
                            if (inMemoryFileUploads[lastKey].Count == 0)
                            {
                                inMemoryFileUploads.Remove(lastKey);
                            }
                        }
                        else
                        {
                            // ... switch this one to a temporary file
                            currentFileUploadTempFilename = HttpInternalObjects.RandomTempFilepath(tempPath, out currentWritingStream);
                            memory.WriteTo(currentWritingStream);
                            memory.Close();
                            memory.Dispose();
                        }
                    }
                    currentWritingStream.Write(buffer, bufferIndex, howMuchToWrite);
                }
                // If we encountered the boundary, add this field to _postFields or this upload to _fileUploads or inMemoryFileUploads
                if (boundaryFound)
                {
                    if (currentWritingStream != null)
                    {
                        currentWritingStream.Close();
                        if (!currentIsFileUpload)
                        {
                            // It's a normal field
                            _postFields[currentFieldName].Add(Encoding.UTF8.GetString(((MemoryStream)currentWritingStream).ToArray()));
                        }
                        else
                        {
                            // It's a file upload
                            var fileUpload = new FileUpload(currentFileUploadContentType, currentFileUploadFilename);
                            if (currentFileUploadTempFilename != null)
                            {
                                // The file upload has already been written to disk
                                fileUpload.LocalFilename = currentFileUploadTempFilename;
                            }
                            else
                            {
                                // The file upload is still in memory. Keep track of it in inMemoryFileUploads so that we can still write it to disk later if necessary
                                var memory = (MemoryStream)currentWritingStream;
                                fileUpload.Data = memory.ToArray();
                                inMemoryFileUploads.AddSafe(fileUpload.Data.LongLength, fileUpload);
                                inMemoryFileUploadsTotal += fileUpload.Data.LongLength;
                            }
                            _fileUploads[currentFieldName] = fileUpload;
                        }
                        currentWritingStream.Dispose();
                        currentWritingStream = null;
                    }
                    // If that was the final boundary, we are done
                    if (end)
                    {
                        break;
                    }
                    // Consume the boundary and go back to processing headers
                    bytesRead -= boundaryIndex - bufferIndex + middleBoundary.Length;
                    bufferIndex = boundaryIndex + middleBoundary.Length;
                    processingHeaders = true;
                    currentHeaders = "";
                    utf8Decoder.Reset();
                    continue;
                }
                else
                {
                    // No boundary there. Received data has been written to the currentWritingStream above.
                    // Now copy the remaining little bit (which may contain part of the bounary) into a new buffer
                    switchBuffer(bufferIndex + howMuchToWrite, bytesRead - howMuchToWrite);
                    bytesRead -= howMuchToWrite;
                    writeIndex = bytesRead;
                }
            }
            else if (bufferIndex > 0)
            {
                // We are processing content, but there is not enough data in the buffer to ensure that it doesn't contain part of the boundary.
                // Therefore, just copy the data to a new buffer and continue receiving more
                switchBuffer(bufferIndex, bytesRead);
                writeIndex = bytesRead;
            }
        }
        bufferIndex = 0;
        // We need to read enough data to contain the boundary
        do
        {
            bytesRead = body.Read(buffer, writeIndex, bufferSize - writeIndex);
            if (bytesRead == 0)
            {
                // Premature end of content. We want to allow broken clients (such as UnityWebRequest) to work, so tolerate this
                // by flushing whatever has been received so far as the final field/upload value.
                if (currentWritingStream != null)
                {
                    currentWritingStream.Write(buffer, 0, writeIndex);
                    currentWritingStream.Close();
                    if (!currentIsFileUpload)
                    {
                        // It's a normal field
                        _postFields[currentFieldName].Add(Encoding.UTF8.GetString(((MemoryStream)currentWritingStream).ToArray()));
                    }
                    else
                    {
                        // It's a file upload
                        var fileUpload = new FileUpload(currentFileUploadContentType, currentFileUploadFilename);
                        if (currentFileUploadTempFilename != null)
                        {
                            fileUpload.LocalFilename = currentFileUploadTempFilename;
                        }
                        else
                        {
                            fileUpload.Data = ((MemoryStream)currentWritingStream).ToArray();
                        }
                        _fileUploads[currentFieldName] = fileUpload;
                    }
                    currentWritingStream.Dispose();
                }
                return;
            }
            writeIndex += bytesRead;
        }
        while (writeIndex < lastBoundary.Length);
        bytesRead = writeIndex;
    }
}
/// <summary>
/// Generate the left and right edges of the paths in this network,
/// automatically joining them at shared end nodes. Nodes should have been
/// generated for the network prior to calling this function.
/// </summary>
/// <typeparam name="TPath">The path type; must implement IWidePath.</typeparam>
/// <param name="paths">The paths forming the network.  Paths without a Spine curve are skipped.</param>
public static void GenerateNetworkPathEdges<TPath>(this IList<TPath> paths)
    where TPath : IWidePath
{
    // Map from spine-curve GUID back to its owning path, so node vertices
    // (which reference curves) can be resolved to paths below.
    var pathMap = new Dictionary<Guid, TPath>();
    // Generate initial curves + build map:
    foreach (TPath path in paths)
    {
        if (path.Spine != null)
        {
            pathMap[path.Spine.GUID] = path;
            path.GenerateInitialPathEdges();
            path.CurveInitialPathEdges();
        }
    }
    NodeCollection nodes = paths.ExtractNetworkPathNodes();
    // Trim edges at nodes:
    foreach (Node node in nodes)
    {
        if (node.Vertices.Count > 0)
        {
            // Sort connected vertices by the angle pointing away from the node
            var angleSorted = new SortedList<double, Vertex>(node.Vertices.Count);
            foreach (var v in node.Vertices)
            {
                if (v.Owner != null && v.Owner is Curve && pathMap.ContainsKey(v.Owner.GUID))
                {
                    Curve crv = (Curve)v.Owner;
                    if (v.IsStart)
                    {
                        // Curve starts here: tangent at 0 already points away from the node.
                        angleSorted.AddSafe(crv.TangentAt(0).Angle, v);
                    }
                    else if (v.IsEnd)
                    {
                        // Curve ends here: reverse the end tangent so it too points away.
                        angleSorted.AddSafe(crv.TangentAt(1).Reverse().Angle, v);
                    }
                }
            }
            if (angleSorted.Count > 1)
            {
                // Walk around the node, matching each path's edges with its angular neighbours.
                // NOTE(review): this loop stops at Count - 1, so the last vertex is never
                // the 'reference' path — confirm the wrap-around via GetWrapped covers it.
                for (int i = 0; i < angleSorted.Count - 1; i++)
                {
                    // Reference case is path leading away from node
                    Vertex vR = angleSorted.Values.GetWrapped(i - 1);
                    Vertex v = angleSorted.Values[i];
                    Vertex vL = angleSorted.Values.GetWrapped(i + 1);
                    Angle a = new Angle(angleSorted.Keys[i]).NormalizeTo2PI();
                    Angle aR = new Angle(angleSorted.Keys.GetWrapped(i - 1) - a);
                    Angle aL = new Angle(angleSorted.Keys.GetWrapped(i + 1) - a).Explement();
                    TPath pathR = pathMap[vR.Owner.GUID];
                    TPath path = pathMap[v.Owner.GUID];
                    TPath pathL = pathMap[vL.Owner.GUID];
                    // Work out correct edges to match up based on direction:
                    Vertex edgeVR;
                    Vertex edgeVL;
                    double offsR;
                    double offsL;
                    if (v.IsStart)
                    {
                        // Curve is pointing away from the node
                        edgeVR = path.RightEdge.Start;
                        edgeVL = path.LeftEdge.Start;
                        offsR = path.RightOffset;
                        offsL = path.LeftOffset;
                    }
                    else
                    {
                        // Curve is pointing towards the node - flip everything!
                        edgeVR = path.LeftEdge.End;
                        edgeVL = path.RightEdge.End;
                        offsR = path.LeftOffset;
                        offsL = path.RightOffset;
                    }
                    // Matching end of the neighbour on the right:
                    Vertex edgeVR2;
                    double offsR2;
                    if (vR.IsStart)
                    {
                        edgeVR2 = pathR.LeftEdge.Start;
                        offsR2 = pathR.LeftOffset;
                    }
                    else
                    {
                        edgeVR2 = pathR.RightEdge.End;
                        offsR2 = pathR.RightOffset;
                    }
                    // Matching end of the neighbour on the left:
                    Vertex edgeVL2;
                    double offsL2;
                    if (vL.IsStart)
                    {
                        edgeVL2 = pathL.RightEdge.Start;
                        offsL2 = pathL.RightOffset;
                    }
                    else
                    {
                        edgeVL2 = pathL.LeftEdge.End;
                        offsL2 = pathL.LeftOffset;
                    }
                    bool detectMismatches = false;
                    // At a reflex corner the wider path must not be trimmed, only the narrower one.
                    bool canTrimR = true;
                    bool canTrimR2 = true;
                    if (aR.IsReflex)
                    {
                        if (offsR > offsR2)
                        {
                            canTrimR = false;
                        }
                        else if (offsR2 > offsR)
                        {
                            canTrimR2 = false;
                        }
                    }
                    if (!Curve.MatchEnds(edgeVR, edgeVR2, detectMismatches, canTrimR, canTrimR2))
                    {
                        // Ends could not be matched directly: extend the narrower edge to a line
                        // through the wider edge's end and cap with a straight connecting segment.
                        if (offsR > offsR2)
                        {
                            Curve.ExtendToLineXY(edgeVR2, node.Position, edgeVR.Position - node.Position);
                            path.SetEndEdge(edgeVR, new Line(edgeVR.Position, edgeVR2.Position));
                        }
                        else
                        {
                            Curve.ExtendToLineXY(edgeVR, node.Position, edgeVR2.Position - node.Position);
                            pathR.SetEndEdge(edgeVR2, new Line(edgeVR2.Position, edgeVR.Position));
                        }
                    }
                    bool canTrimL = true;
                    bool canTrimL2 = true;
                    if (aL.IsReflex)
                    {
                        if (offsL > offsL2)
                        {
                            canTrimL = false;
                        }
                        else if (offsL2 > offsL)
                        {
                            canTrimL2 = false;
                        }
                    }
                    if (!Curve.MatchEnds(edgeVL, edgeVL2, detectMismatches, canTrimL, canTrimL2))
                    {
                        // Same fall-back as the right side, mirrored.
                        if (offsL > offsL2)
                        {
                            Curve.ExtendToLineXY(edgeVL2, node.Position, edgeVL.Position - node.Position);
                            path.SetEndEdge(edgeVL, new Line(edgeVL.Position, edgeVL2.Position));
                        }
                        else
                        {
                            Curve.ExtendToLineXY(edgeVL, node.Position, edgeVL2.Position - node.Position);
                            pathL.SetEndEdge(edgeVL2, new Line(edgeVL2.Position, edgeVL.Position));
                        }
                    }
                }
            }
            else if (angleSorted.Count == 1)
            {
                // Dead-end node: close off the single path's end with a cap line.
                // Close off end:
                Vertex v = angleSorted.Values[0];
                TPath path = pathMap[v.Owner.GUID];
                if (v.IsStart)
                {
                    //Extend to offset:
                    if (path.StartOffset != 0)
                    {
                        path.LeftEdge.ExtendStart(path.StartOffset);
                        path.RightEdge.ExtendStart(path.StartOffset);
                    }
                    path.StartCapLeft = new Line(path.LeftEdge.StartPoint, path.RightEdge.StartPoint);
                }
                else
                {
                    //Extend to offset:
                    if (path.EndOffset != 0)
                    {
                        path.LeftEdge.ExtendEnd(path.EndOffset);
                        path.RightEdge.ExtendEnd(path.EndOffset);
                    }
                    path.EndCapLeft = new Line(path.LeftEdge.EndPoint, path.RightEdge.EndPoint);
                }
            }
        }
    }
}
/// <summary>
/// Quadrangulate this mesh by merging adjacent tris into quads.
/// The algorithm will prioritise merging the longest edges first
/// </summary>
public void Quadrangulate()
{
    // NOTE(review): the summary says "longest edges first", but the pairs are keyed
    // and ordered by SharedEdgeSquareness, not edge length — confirm which is intended.
    var sortedPairs = new SortedList<double, Pair<MeshFace, MeshFace>>(Count);
    // Find adjacent pairs of tris and sort by resultant face 'squareness'
    for (int i = 0; i < Count - 1; i++)
    {
        MeshFace faceA = this[i];
        if (faceA.IsTri)
        {
            for (int j = i + 1; j < Count; j++)
            {
                MeshFace faceB = this[j];
                if (faceB.IsTri)
                {
                    double squareness = faceA.SharedEdgeSquareness(faceB);
                    // NaN means no shared edge; 0.8 is the squareness tolerance for an
                    // acceptable merged quad — presumably empirically chosen, TODO confirm.
                    if (!squareness.IsNaN() && squareness.Abs() < 0.8)
                    {
                        sortedPairs.AddSafe(squareness, Pair.Create(faceA, faceB));
                    }
                }
            }
        }
    }
    // Walk the pairs in ascending squareness order and merge each pair whose two
    // faces are both still present (a face may already have been consumed by an
    // earlier, better-scoring merge).
    for (int i = 0; i < sortedPairs.Count; i++)
    {
        var pair = sortedPairs.Values[i];
        if (Contains(pair.First.GUID) && Contains(pair.Second.GUID))
        {
            Remove(pair.First);
            Remove(pair.Second);
            Add(pair.First.MergeWith(pair.Second));
        }
    }
}
/// <summary>If this request is a POST request, replaces the body of the request with data from the specified stream.
/// This will clear and reinitialise all the POST parameter values and file uploads.</summary>
/// <param name="body">Stream to read new POST request body from.</param>
/// <param name="tempPath">The temporary directory to use for file uploads. Default is <see cref="Path.GetTempPath"/>.</param>
/// <param name="storeFileUploadInFileAtSize">The maximum size (in bytes) at which file uploads are stored in memory.
/// Any uploads that exceed this limit are written to temporary files on disk. Default is 16 MB.</param>
internal void ParsePostBody(Stream body, string tempPath = null, long storeFileUploadInFileAtSize = 16*1024*1024)
{
    _fileUploads.Clear();
    _postFields.Clear();
    if (Method != HttpMethod.Post)
        return;
    // Simple case: url-encoded body is parsed in one pass by the query-string parser.
    if (Headers.ContentType == HttpPostContentType.ApplicationXWwwFormUrlEncoded)
    {
        using (var reader = new StreamReader(body, Encoding.UTF8))
            _postFields = HttpHelper.ParseQueryValueParameters(reader).ToNameValuesCollection();
        return;
    }
    // From here on: multipart/form-data, parsed as a streaming state machine.
    // An excessively long boundary is going to screw up the following algorithm.
    // (Actually a limit of up to bufferSize - 8 would work, but I think 1024 is more than enough.)
    if (body == null || Headers.ContentMultipartBoundary == null || Headers.ContentMultipartBoundary.Length > 1024)
        return;
    if (tempPath == null)
        tempPath = Path.GetTempPath();
    // Instead of reallocating a new buffer multiple times, allocate at most two buffers and switch between them as necessary
    int bufferSize = 65536;
    byte[] buffer1 = new byte[bufferSize];
    byte[] buffer2 = null;
    byte[] buffer = buffer1;
    // Copies the unconsumed tail of the current buffer to the front of the other
    // buffer and makes that other buffer current.
    Action<int, int> switchBuffer = (offset, count) =>
    {
        if (buffer == buffer1)
        {
            if (buffer2 == null)
                buffer2 = new byte[bufferSize];
            Buffer.BlockCopy(buffer, offset, buffer2, 0, count);
            buffer = buffer2;
        }
        else
        {
            Buffer.BlockCopy(buffer, offset, buffer1, 0, count);
            buffer = buffer1;
        }
    };
    // Process POST request upload data
    int bytesRead = body.Read(buffer, 0, bufferSize);
    if (bytesRead == 0) // premature end of request body
        return;
    // We expect the input to begin with "--" followed by the boundary followed by "\r\n"
    byte[] expecting = ("--" + Headers.ContentMultipartBoundary + "\r\n").ToUtf8();
    int bufferIndex = bytesRead;
    // Keep reading until we have enough bytes to check the opening boundary marker.
    while (bufferIndex < expecting.Length)
    {
        bytesRead = body.Read(buffer, bufferIndex, buffer.Length - bufferIndex);
        if (bytesRead == 0) // premature end of request body
            return;
        bufferIndex += bytesRead;
    }
    if (!buffer.SubarrayEquals(0, expecting, 0, expecting.Length))
        return;
    // Consume the opening boundary: bytesRead = unconsumed bytes, bufferIndex = read position.
    bytesRead = bufferIndex - expecting.Length;
    bufferIndex = expecting.Length;
    // Now comes the main reading loop
    bool processingHeaders = true;     // state flag: parsing part headers vs part content
    string currentHeaders = "";
    string currentFieldName = null;
    Stream currentWritingStream = null;  // destination for the current part's content (memory or temp file)
    bool currentIsFileUpload = false;
    string currentFileUploadFilename = null;
    string currentFileUploadContentType = null;
    string currentFileUploadTempFilename = null;
    Decoder utf8Decoder = Encoding.UTF8.GetDecoder();  // stateful: headers are decoded byte-by-byte
    char[] chArr = new char[1];
    byte[] lastBoundary = ("\r\n--" + Headers.ContentMultipartBoundary + "--\r\n").ToUtf8();
    byte[] middleBoundary = ("\r\n--" + Headers.ContentMultipartBoundary + "\r\n").ToUtf8();
    // In-memory uploads sorted by size, so the largest can be spilled to disk first when over the limit.
    var inMemoryFileUploads = new SortedList<long, List<FileUpload>>();
    long inMemoryFileUploadsTotal = 0;
    while (bufferIndex > 0 || bytesRead > 0)
    {
        int writeIndex = 0;
        if (bytesRead > 0)
        {
            if (processingHeaders)
            {
                // Accumulate header characters one at a time until the blank line ("\r\n\r\n").
                bool newLineFound = false;
                while (!newLineFound && bytesRead > 0)
                {
                    int numCh = utf8Decoder.GetChars(buffer, bufferIndex, 1, chArr, 0);
                    bufferIndex++;
                    bytesRead--;
                    if (numCh != 0)
                        currentHeaders += chArr[0];
                    newLineFound = currentHeaders.EndsWith("\r\n\r\n");
                }
                if (newLineFound)
                {
                    // Reset per-part state, then parse Content-Disposition / Content-Type.
                    currentIsFileUpload = false;
                    currentFileUploadContentType = null;
                    currentFileUploadFilename = null;
                    currentFileUploadTempFilename = null;
                    currentFieldName = null;
                    currentWritingStream = null;
                    foreach (string header in currentHeaders.Split(new string[] { "\r\n" }, StringSplitOptions.None))
                    {
                        Match m;
                        if ((m = Regex.Match(header, @"^content-disposition\s*:\s*form-data\s*;(.*)$", RegexOptions.IgnoreCase)).Success)
                        {
                            // Walk the "key=value" / "key=\"value\"" parameters one at a time.
                            string v = m.Groups[1].Value;
                            while (v.Length > 0)
                            {
                                m = Regex.Match(v, @"^\s*(\w+)=""([^""]*)""\s*(?:;\s*|$)");
                                if (!m.Success)
                                    m = Regex.Match(v, @"^\s*(\w+)=([^;]*)\s*(?:;\s*|$)");
                                if (!m.Success)
                                    break;
                                if (m.Groups[1].Value.ToLowerInvariant() == "name")
                                    currentFieldName = m.Groups[2].Value;
                                else if (m.Groups[1].Value.ToLowerInvariant() == "filename")
                                    currentFileUploadFilename = m.Groups[2].Value;
                                v = v.Substring(m.Length);
                            }
                        }
                        else if ((m = Regex.Match(header, @"^content-type\s*:\s*(.*)$", RegexOptions.IgnoreCase)).Success)
                            currentFileUploadContentType = m.Groups[1].Value;
                    }
                    // Only parts with a field name are captured; a filename marks it as a file upload.
                    if (currentFieldName != null)
                    {
                        currentWritingStream = new MemoryStream();
                        if (currentFileUploadFilename != null)
                            currentIsFileUpload = true;
                    }
                    processingHeaders = false;
                    continue;
                }
            }
            else if (bytesRead >= lastBoundary.Length) // processing content
            {
                // Search for the terminating boundary and the inter-part boundary; prefer whichever comes first.
                bool boundaryFound = false;
                bool end = false;
                int boundaryIndex = buffer.IndexOfSubarray(lastBoundary, bufferIndex, bytesRead);
                if (boundaryIndex != -1)
                {
                    boundaryFound = true;
                    end = true;
                }
                int middleBoundaryIndex = buffer.IndexOfSubarray(middleBoundary, bufferIndex, bytesRead);
                if (middleBoundaryIndex != -1 && (!boundaryFound || middleBoundaryIndex < boundaryIndex))
                {
                    boundaryFound = true;
                    boundaryIndex = middleBoundaryIndex;
                    end = false;
                }
                // NOTE(review): the inner 'howMuchToWrite = ...' assignments inside the
                // ternary are redundant (the initializer assigns the same value) — harmless,
                // but worth cleaning up in a behaviour-changing pass.
                int howMuchToWrite = boundaryFound
                    // If we have encountered the boundary, write all the data up to it
                    ? howMuchToWrite = boundaryIndex - bufferIndex
                    // Write as much of the data to the output stream as possible, but leave enough so that we can still recognise the boundary
                    : howMuchToWrite = bytesRead - lastBoundary.Length; // this is never negative because of the "if" we're in
                // Write the aforementioned amount of data to the output stream
                if (howMuchToWrite > 0 && currentWritingStream != null)
                {
                    // If we're currently processing a file upload in memory, and it takes the total file uploads over the limit...
                    if (currentIsFileUpload && currentWritingStream is MemoryStream && ((MemoryStream) currentWritingStream).Length + inMemoryFileUploadsTotal + howMuchToWrite > storeFileUploadInFileAtSize)
                    {
                        var memory = (MemoryStream) currentWritingStream;
                        var inMemoryKeys = inMemoryFileUploads.Keys;
                        if (inMemoryKeys.Count > 0 && memory.Length < inMemoryKeys[inMemoryKeys.Count - 1])
                        {
                            // ... switch the largest one to a temporary file
                            var lastKey = inMemoryKeys[inMemoryKeys.Count - 1];
                            var biggestUpload = inMemoryFileUploads[lastKey][0];
                            inMemoryFileUploads[lastKey].RemoveAt(0);
                            Stream fileStream;
                            biggestUpload.LocalFilename = HttpInternalObjects.RandomTempFilepath(tempPath, out fileStream);
                            fileStream.Write(biggestUpload.Data, 0, biggestUpload.Data.Length);
                            fileStream.Close();
                            fileStream.Dispose();
                            inMemoryFileUploadsTotal -= biggestUpload.Data.LongLength;
                            biggestUpload.Data = null;
                            if (inMemoryFileUploads[lastKey].Count == 0)
                                inMemoryFileUploads.Remove(lastKey);
                        }
                        else
                        {
                            // ... switch this one to a temporary file
                            currentFileUploadTempFilename = HttpInternalObjects.RandomTempFilepath(tempPath, out currentWritingStream);
                            memory.WriteTo(currentWritingStream);
                            memory.Close();
                            memory.Dispose();
                        }
                    }
                    currentWritingStream.Write(buffer, bufferIndex, howMuchToWrite);
                }
                // If we encountered the boundary, add this field to _postFields or this upload to _fileUploads or inMemoryFileUploads
                if (boundaryFound)
                {
                    if (currentWritingStream != null)
                    {
                        currentWritingStream.Close();
                        if (!currentIsFileUpload)
                            // It's a normal field
                            _postFields[currentFieldName].Add(Encoding.UTF8.GetString(((MemoryStream) currentWritingStream).ToArray()));
                        else
                        {
                            // It's a file upload
                            var fileUpload = new FileUpload(currentFileUploadContentType, currentFileUploadFilename);
                            if (currentFileUploadTempFilename != null)
                                // The file upload has already been written to disk
                                fileUpload.LocalFilename = currentFileUploadTempFilename;
                            else
                            {
                                // The file upload is still in memory. Keep track of it in inMemoryFileUploads so that we can still write it to disk later if necessary
                                var memory = (MemoryStream) currentWritingStream;
                                fileUpload.Data = memory.ToArray();
                                inMemoryFileUploads.AddSafe(fileUpload.Data.LongLength, fileUpload);
                                inMemoryFileUploadsTotal += fileUpload.Data.LongLength;
                            }
                            _fileUploads[currentFieldName] = fileUpload;
                        }
                        currentWritingStream.Dispose();
                        currentWritingStream = null;
                    }
                    // If that was the final boundary, we are done
                    if (end)
                        break;
                    // Consume the boundary and go back to processing headers
                    bytesRead -= boundaryIndex - bufferIndex + middleBoundary.Length;
                    bufferIndex = boundaryIndex + middleBoundary.Length;
                    processingHeaders = true;
                    currentHeaders = "";
                    utf8Decoder.Reset();
                    continue;
                }
                else
                {
                    // No boundary there. Received data has been written to the currentWritingStream above.
                    // Now copy the remaining little bit (which may contain part of the bounary) into a new buffer
                    switchBuffer(bufferIndex + howMuchToWrite, bytesRead - howMuchToWrite);
                    bytesRead -= howMuchToWrite;
                    writeIndex = bytesRead;
                }
            }
            else if (bufferIndex > 0)
            {
                // We are processing content, but there is not enough data in the buffer to ensure that it doesn't contain part of the boundary.
                // Therefore, just copy the data to a new buffer and continue receiving more
                switchBuffer(bufferIndex, bytesRead);
                writeIndex = bytesRead;
            }
        }
        bufferIndex = 0;
        // We need to read enough data to contain the boundary
        do
        {
            bytesRead = body.Read(buffer, writeIndex, bufferSize - writeIndex);
            if (bytesRead == 0) // premature end of content
            {
                // Unlike the newer variant of this parser, any partially-received
                // field/upload is discarded here rather than flushed.
                if (currentWritingStream != null)
                {
                    currentWritingStream.Close();
                    currentWritingStream.Dispose();
                }
                return;
            }
            writeIndex += bytesRead;
        } while (writeIndex < lastBoundary.Length);
        bytesRead = writeIndex;
    }
}