// Recursively computes the convex hull of `points` via divide-and-conquer.
// NOTE(review): this implementation is unfinished — the tangent finders are called
// without arguments and the combine step returns an empty list; the method cannot
// produce a hull as written. Verify against the intended merge algorithm.
private List<PointF> DivideAndConquer(List<PointF> points)
{
    // Base case: 2 or 3 points are trivially their own hull.
    if (points.Count == 2 || points.Count == 3)
    {
        // TODO make sure they are in CC order starting with the left-most
        return points;
    }
    // Find the middle index
    int middleIdx = points.Count / 2;
    if (points.Count % 2 == 1)
        middleIdx++; // odd count: the left half takes the extra point
    // DAC the left and right ranges of the points
    List<PointF> edgePtsLeft = DivideAndConquer(points.GetRange(0, middleIdx));
    List<PointF> edgePtsRight = DivideAndConquer(points.GetRange(middleIdx, points.Count - middleIdx));
    // Get upper and lower tangent indexes
    // NOTE(review): presumably these should receive edgePtsLeft/edgePtsRight — confirm the helper signatures.
    Tuple<int, int> upperTangentIndexes = FindUpperTangent();
    Tuple<int, int> lowerTangentIndexes = FindLowerTangent();
    int idxOfUpperTangentInEdgePtsL = upperTangentIndexes.Item1;
    int idxOfUpperTangentInEdgePtsR = upperTangentIndexes.Item2;
    int idxOfLowerTangentInEdgePtsL = lowerTangentIndexes.Item1;
    int idxOfLowerTangentInEdgePtsR = lowerTangentIndexes.Item2;
    // Combine the new hulls using the newly found indexes
    // NOTE(review): combine step not implemented — combinedEdgePts is returned empty.
    List<PointF> combinedEdgePts = new List<PointF>();
    return combinedEdgePts;
}
/// <summary>
/// Compares two result files CnpA and CnpB and stores the comparison results in the
/// directory of the first file:
/// CnpAAndCnpB.Cnp, CnpAOrCnpB.Cnp, CnpANotCnpB.Cnp, CnpBNotCnpA.Cnp
/// Return value: hashtable with the following entries
/// key:"lst1AndLst2", value: number of results in CnpAAndCnpB
/// key:"lst1OrLst2", value: number of results in CnpAOrCnpB (totalNum)
/// key:"lst1NotLst2", value: number of results in CnpANotCnpB (addNum)
/// key:"lst2NotLst1", value: number of results in CnpBNotCnpA (deleteNum)
/// </summary>
// NOTE(review): lst2 is never populated and the `file` parameter is unused, so the
// merge loop below always runs against an empty second list. Presumably lst2 should
// be built from `file` — confirm against the original intent before relying on this.
public Hashtable Compare(String filePath1, byte[] file)
{
    List<int> lst1 = GetResultList(filePath1);
    List<int> lst2 = new List<int>();
    List<int> lst1AndLst2 = new List<int>();
    List<int> lst1OrLst2 = new List<int>();
    List<int> lst1NotLst2 = new List<int>();
    List<int> lst2NotLst1 = new List<int>();
    int lstLength1 = lst1.Count;
    int lstLength2 = lst2.Count;
    int i = 0; // cursor into lst1
    int j = 0; // cursor into lst2
    // Sorted-merge walk over both lists, classifying each value into
    // intersection / union / one-sided difference buckets.
    for (; i < lstLength1 && j < lstLength2; )
    {
        if (lst1[i] == lst2[j])
        {
            lst1AndLst2.Add(lst1[i]);
            lst1OrLst2.Add(lst1[i]);
            i++;
            j++;
        }
        else if (lst1[i] > lst2[j])
        {
            lst2NotLst1.Add(lst2[j]);
            lst1OrLst2.Add(lst2[j]);
            j++;
        }
        else if (lst1[i] < lst2[j])
        {
            lst1NotLst2.Add(lst1[i]);
            lst1OrLst2.Add(lst1[i]);
            i++;
        }
    }
    // Append the remainder of lst2 when lst1 was exhausted first
    if (lstLength1 == i)
    {
        lst2NotLst1.AddRange(lst2.GetRange(j, lstLength2 - j));
        lst1OrLst2.AddRange(lst2.GetRange(j, lstLength2 - j));
    }
    // Append the remainder of lst1 when lst2 was exhausted first
    else if (lstLength2 == j)
    {
        lst1NotLst2.AddRange(lst1.GetRange(i, lstLength1 - i));
        lst1OrLst2.AddRange(lst1.GetRange(i, lstLength1 - i));
    }
    Hashtable resultNum = new Hashtable();
    //resultNum.Add("lst1AndLst2", lst1AndLst2.Count);
    //resultNum.Add("lst1OrLst2", lst1OrLst2.Count);
    //resultNum.Add("lst1NotLst2", lst1NotLst2.Count);
    //resultNum.Add("lst2NotLst1", lst2NotLst1.Count);
    lst1OrLst2.AddRange(lst2NotLst1);
    resultNum.Add("Num", lst1NotLst2.Count);
    resultNum.Add("File", lst1NotLst2.ToArray());
    return resultNum;
}
// Parses a dot-separated series name (e.g. "MSFT.Bar.Minute.60") into its
// symbol, suffix, data type and — for bar series — bar type and size.
public static DataSeriesInfo GetDataSeriesInfo(string seriesName)
{
    DataSeriesInfo info = new DataSeriesInfo();
    info.SeriesName = seriesName;
    info.DataType = DataType.Unknown;
    List<string> parts = new List<string>((IEnumerable<string>)seriesName.Split(new char[] { '.' }));
    string lastPart = parts[parts.Count - 1];

    // Simple one-token suffixes map directly to a data type.
    if (lastPart == "Daily")
        info.DataType = DataType.Daily;
    else if (lastPart == "Trade")
        info.DataType = DataType.Trade;
    else if (lastPart == "Quote")
        info.DataType = DataType.Quote;
    else if (lastPart == "Depth")
        info.DataType = DataType.MarketDepth;

    // Bar series use a three-token suffix: "Bar.<BarType>.<BarSize>".
    int suffixTokens = 1;
    long barSize;
    if (info.DataType == DataType.Unknown
        && parts.Count >= 4
        && parts[parts.Count - 3] == "Bar"
        && Enum.IsDefined(typeof(BarType), (object)parts[parts.Count - 2])
        && long.TryParse(parts[parts.Count - 1], out barSize))
    {
        info.DataType = DataType.Bar;
        info.BarType = (BarType)Enum.Parse(typeof(BarType), parts[parts.Count - 2]);
        info.BarSize = barSize;
        suffixTokens = 3;
    }

    // Everything before the suffix tokens is the symbol; the rest is the suffix.
    info.Symbol = string.Join(".", parts.GetRange(0, parts.Count - suffixTokens).ToArray());
    info.Suffix = string.Join(".", parts.GetRange(parts.Count - suffixTokens, suffixTokens).ToArray());
    return info;
}
/// <summary>
/// Consumes the inter-packet-delay list in chunks of at most <paramref name="sampleSize"/>,
/// computing a regularity score for each chunk.
/// Note: deliberately destructive — <paramref name="ipds"/> is emptied as it is processed.
/// </summary>
/// <param name="ipds">Inter-packet delays; consumed (emptied) by this call.</param>
/// <param name="sampleSize">Maximum number of delays per chunk.</param>
/// <param name="windowSize">Window size forwarded to CalculateRegularity.</param>
/// <returns>One regularity value per chunk, in order.</returns>
public static List<decimal> Test(List<decimal> ipds, int sampleSize, int windowSize)
{
    List<decimal> results = new List<decimal>();
    while (ipds.Count > 0)
    {
        // Take a full sample if available, otherwise whatever remains.
        int take = ipds.Count > sampleSize ? sampleSize : ipds.Count;
        List<decimal> sampleIpds = ipds.GetRange(0, take);
        ipds.RemoveRange(0, take);
        results.Add(CalculateRegularity(sampleIpds, windowSize));
    }
    return results;
}
// Async receive callback for the UDP listener: unwraps the framing
// (length-prefixed source address + PDU payload) and dispatches by message type.
private static void ReceiveUDP(IAsyncResult ar)
{
    var u = (UdpClient)((UdpState)(ar.AsyncState)).u;
    var e = (IPEndPoint)((UdpState)(ar.AsyncState)).e;
    var rawBytes = new List<byte>();
    rawBytes.AddRange(u.EndReceive(ar, ref e));
    // Byte 0 holds the length of the source-address block that follows it.
    var FromAddress = rawBytes.GetRange(1, rawBytes[0]);
    // Everything after the address block is the PDU itself.
    var receiveBytes = rawBytes.GetRange(rawBytes[0] + 1, rawBytes.Count - 1 - rawBytes[0]).ToArray();
    switch ((PDUbase.MessageTypeIndicator)receiveBytes[0])
    {
        case PDUbase.MessageTypeIndicator.DeliverReport:
        case PDUbase.MessageTypeIndicator.Command:
            // Intentionally ignored message types.
            break;
        case PDUbase.MessageTypeIndicator.Submit:
            var packet = new SMS_Submit { BinaryForm = receiveBytes };
            // "Hairpin": destinations with these prefixes are routed back out over UDP
            // instead of HTTP. NOTE(review): prefix semantics ("21"/"11") inferred — confirm.
            var hairpin = packet.DestinationAddress.ToString().StartsWith("21") || packet.DestinationAddress.ToString().StartsWith("11");
            if (hairpin)
            {
                // NOTE(review): FromAddress is a List<byte>, so ToString() yields the TYPE
                // NAME ("System.Collections.Generic.List`1[System.Byte]"), not the address.
                // This looks like a bug — an explicit decode of the bytes is probably intended.
                UDP.SendSMS(packet.UserData.ToString(), packet.DestinationAddress.ToString(), FromAddress.ToString());
                break;
            }
            Http.SendSMS(packet.UserData.ToString(), packet.DestinationAddress.ToString(), "");
            break;
        default:
            break;
    }
}
// Ramer–Douglas–Peucker line simplification: keeps only points that deviate from
// the first-to-last baseline by more than `eps`.
// BUGFIX: the left recursion used GetRange(0, maxIndex), which EXCLUDED the
// max-distance point from the left half; combined with the RemoveAt below, the
// vertex points[maxIndex - 1] was silently dropped from the output. The split
// point must appear in both halves (standard RDP), with the duplicate removed
// when the halves are joined.
private static List<Point> SimplifyLine(List<Point> points, double eps)
{
    // Find the point farthest from the baseline segment.
    Line l = new Line(points[0], points[points.Count - 1]);
    double maxDist = 0;
    int maxIndex = 0;
    for (int i = 1; i < points.Count - 1; i++)
    {
        double dist = l.PointDistance(points[i]);
        if (dist > maxDist)
        {
            maxDist = dist;
            maxIndex = i;
        }
    }
    if (maxDist > eps)
    {
        // Split AT the farthest point: it is the last element of the left half
        // and the first element of the right half.
        List<Point> list1 = SimplifyLine(points.GetRange(0, maxIndex + 1), eps);
        List<Point> list2 = SimplifyLine(points.GetRange(maxIndex, points.Count - maxIndex), eps);
        List<Point> newPoints = new List<Point>();
        newPoints.AddRange(list1);
        newPoints.RemoveAt(newPoints.Count - 1); // drop the duplicated split point
        newPoints.AddRange(list2);
        return newPoints;
    }
    else
    {
        // Nothing significant between the endpoints: keep only the endpoints.
        List<Point> newPoints = new List<Point>();
        newPoints.Add(points[0]);
        newPoints.Add(points[points.Count - 1]);
        return newPoints;
    }
}
// Player list is needed in OnPlayerList, OnPlayerLeave and server.onRoundOverPlayers
// Parses the word stream "<playerCount> <parameterCount> <param names...> <player values...>"
// into CPlayerInfo records.
public new static List<CPlayerInfo> GetPlayerList(List<string> words)
{
    List<CPlayerInfo> lstReturnList = new List<CPlayerInfo>();
    int currentOffset = 0;
    int parameterCount = 0;
    int playerCount = 0;
    if (words.Count > currentOffset && int.TryParse(words[currentOffset++], out playerCount) == true)
    {
        // BUGFIX: was `words.Count > 0`, which is always true once the first token
        // was read — it must check that the parameter-count token itself exists.
        // Also require the full parameter-name block before calling GetRange on it.
        if (words.Count > currentOffset && int.TryParse(words[currentOffset++], out parameterCount) == true
            && words.Count >= currentOffset + parameterCount)
        {
            List<string> lstParameters = words.GetRange(currentOffset, parameterCount);
            currentOffset += parameterCount;
            for (int i = 0; i < playerCount; i++)
            {
                // BUGFIX: was `words.Count > currentOffset + (i * parameterCount)`,
                // which only checks the FIRST value of the block and let GetRange
                // throw on a truncated message; require the whole block.
                if (words.Count >= currentOffset + (i + 1) * parameterCount)
                {
                    lstReturnList.Add(new CPlayerInfo(lstParameters, words.GetRange(currentOffset + i * parameterCount, parameterCount)));
                }
            }
        }
    }
    return lstReturnList;
}
/// <summary>
/// Parses csv to TestCases
/// </summary>
/// <param name="data">data parameter</param>
/// <param name="resources">resources parameter</param>
/// <returns>TestCase list</returns>
public static TestCases Parse(List<List<string>> data, ResourceManager resources = null)
{
    // NOTE(review): an EMPTY (non-null) list also raises ArgumentNullException here;
    // ArgumentException would be conventional for that case — confirm callers before changing.
    if (data == null || !data.Any())
    {
        throw new ArgumentNullException("data");
    }
    var rootHeader = HeaderParser.Parse(data);
    HeaderValidator.Validate(rootHeader);
    var cases = new TestCases();
    // Rows are grouped into cases: a non-blank first cell marks the start of a new case.
    var lastStartIndex = 0;
    for (var i = HeaderParser.HeaderRowCount; i < data.Count; i++)
    {
        var row = data[i];
        // assumes every row has at least one cell — row[0] would throw otherwise (TODO confirm)
        if (!string.IsNullOrWhiteSpace(row[0]))
        {
            if (i != HeaderParser.HeaderRowCount)
            {
                // Close out the previous case: rows [lastStartIndex, i).
                cases.Add(TestCaseParser.Parse(rootHeader, data.GetRange(lastStartIndex, i - lastStartIndex), resources));
            }
            lastStartIndex = i;
        }
    }
    // The final case runs from the last marker to the end of the data.
    cases.Add(TestCaseParser.Parse(rootHeader, data.GetRange(lastStartIndex, data.Count - lastStartIndex), resources));
    return cases;
}
// Classic top-down merge sort; returns a new sorted list (input halves are copies).
// PERF FIX: the original merge consumed its inputs with RemoveAt(0), which is O(n)
// per element and made each merge quadratic; index cursors make it linear.
static List<int> MergeSort(List<int> list)
{
    // Base case: 0 or 1 elements are already sorted.
    if (list.Count < 2)
        return list;
    // Sort each half recursively.
    List<int> left = MergeSort(list.GetRange(0, list.Count / 2));
    List<int> right = MergeSort(list.GetRange(list.Count / 2, list.Count - list.Count / 2));
    // Merge the two sorted halves with read cursors instead of destructive RemoveAt(0).
    List<int> result = new List<int>(list.Count);
    int i = 0, j = 0;
    while (i < left.Count && j < right.Count)
    {
        // <= keeps the merge stable (equal elements keep their relative order).
        if (left[i] <= right[j])
            result.Add(left[i++]);
        else
            result.Add(right[j++]);
    }
    // At most one of these loops runs: append the remaining tail.
    while (i < left.Count)
        result.Add(left[i++]);
    while (j < right.Count)
        result.Add(right[j++]);
    return result;
}
// Recursive merge sort. Works on a copy so the caller's list is never mutated.
public override List<int> Sort(List<int> listToSort)
{
    // Deep-copy the input to avoid conflicts with the caller.
    List<int> working = new List<int>(listToSort);

    // Lists of 0 or 1 elements are already sorted.
    if (working.Count <= 1)
        return working;

    // Split at the midpoint and sort each half recursively.
    int mid = working.Count / 2;
    List<int> left = Sort(working.GetRange(0, mid));
    List<int> right = Sort(working.GetRange(mid, working.Count - mid));

    // Combine the two sorted halves.
    return merge(left, right);
}
// Reads all files in `pathList` using worker threads, blocking until they finish.
// NOTE(review): the name looks like a typo for "ReadAllAsync" — kept for callers.
public static void RealAllAsync(List<FilePathInfo> pathList)
{
    // Number of worker threads. Currently fixed at 1, so the partitioning
    // branch below is dead code until this is made configurable.
    int needThreadCount = 1;
    if (needThreadCount > 1)
    {
        // Split the path list into roughly equal slices, one per thread;
        // the last thread takes the remainder.
        int piece = pathList.Count / needThreadCount;
        for (int i = 0; i < needThreadCount - 1; i++)
        {
            new ReadFileThread(pathList.GetRange(i * piece, piece));
        }
        new ReadFileThread(pathList.GetRange((needThreadCount - 1) * piece, pathList.Count - (needThreadCount - 1) * piece));
    }
    else
    {
        new ReadFileThread(pathList);
    }
    // Poll once a second until all threads report completion via the shared counter.
    // NOTE(review): unsynchronized read of a shared int — presumably finThreadCount
    // is volatile or updated with Interlocked elsewhere; verify.
    while (true)
    {
        Thread.Sleep(1000);
        if (finThreadCount == needThreadCount)
        {
            break;
        }
    }
}
// Get a part of a parsed phrase-- drop parents when possible, create a new surrounding group when needed
// start can be negative: count from end
// count can be negative: use that many less than all the elements
public static IParsedPhrase GetSubphrase(IParsedPhrase phrase, int start, int count)
{
    IEnumerable<IParsedPhrase> branches = phrase.Branches;
    // Collect branches from index `start` onward. For a negative start, ii >= start
    // is always true, so ALL branches are collected and trimmed below.
    List<IParsedPhrase> included = new List<IParsedPhrase>();
    int ii = 0;
    foreach (IParsedPhrase branch in branches)
        if (ii++ >= start)
            included.Add(branch);
    // start can be negative: count from end — keep only the last |start| elements.
    if (start < 0)
        included = included.GetRange(included.Count + start, -start);
    // NOTE(review): a positive count larger than included.Count makes GetRange throw
    // ArgumentException — confirm callers guard this.
    if (count > 0)
        included = included.GetRange(0, count);
    // count can be negative: less than max elts — drop the last |count| elements.
    if (count < 0)
        included = included.GetRange(0, included.Count + count);
    if (included.Count == 0)
        return null; // count <= start!
    // A single surviving branch is returned directly rather than wrapped in a group.
    if (included.Count == 1)
        return included[0];
    return new GroupPhrase(phrase.Part, included);
}
/// <summary>
/// An estimated characteristic of occurrence of the subject word in the sequence
/// </summary>
/// <param name="accord">
/// Checking word.
/// </param>
/// <param name="chainLength">
/// Length of whole sequence.
/// </param>
/// <param name="winLen">
/// Length of the scanning window.
/// </param>
/// <param name="minusOne">
/// Data for "minus one" subword.
/// </param>
/// <param name="mid">
/// Data for "minus two" subword.
/// </param>
/// <returns>
/// Design characteristic of occurrence of the word.
/// </returns>
public double DesignExpected(List<string> accord, int chainLength, int winLen, DataCollector minusOne, DataCollector mid)
{
    int shortWord = 2;            // window length at which the middle correction is skipped
    int midlLength = winLen - 2;  // length of the "minus two" (interior) subword
    int minusLength = winLen - 1; // length of the "minus one" subwords
    // Occurrence positions of the word minus its last / first symbol.
    List<int> left = minusOne.Positions(accord.GetRange(0, accord.Count - 1));
    List<int> right = minusOne.Positions(accord.GetRange(1, accord.Count - 1));
    // Interior subword (both ends removed); empty list when the window is length 2.
    List<int> middle = midlLength != 0 ? mid.Positions(accord.GetRange(1, accord.Count - 2)) : new List<int>();
    double criteria = -1; // sentinel if neither branch applies
    if (winLen == shortWord)
    {
        // Short word: product of the two (winLen-1)-subword frequencies.
        criteria = Frequency(left, chainLength, minusLength) * Frequency(right, chainLength, minusLength);
    }
    else if (middle != null) // NOTE(review): always true — `middle` is never assigned null above
    {
        // General case: chain-rule style estimate, dividing by the interior-subword frequency.
        // NOTE(review): no guard against Frequency(middle, ...) being zero — confirm inputs.
        criteria = (Frequency(left, chainLength, minusLength) * Frequency(right, chainLength, minusLength)) / Frequency(middle, chainLength, midlLength);
    }
    return criteria;
}
// Keep only a certain number of each type of backup log, deleting the oldest.
// BUGFIX: backup.Remove(backup.LastIndexOf('-')) threw ArgumentOutOfRangeException
// for any log filename without a '-'; such names are now used as-is.
// The duplicated prune loop is also factored into PurgeBackupGroup.
public static void Purge()
{
    string logsPath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), Constants.APPDATA_FOLDER, Constants.LOG_FOLDER);
    // If there's no folder, there probably aren't any logs
    if (!Directory.Exists(logsPath))
        return;
    // Sorted so that within each backup group, older logs come first.
    string[] files = Directory.GetFiles(logsPath, "*.log");
    Array.Sort(files);
    string currentBackup = null;
    List<string> logsForBackup = new List<string>();
    foreach (string file in files)
    {
        // Group key: filename up to (not including) the last '-'.
        string backup = Path.GetFileNameWithoutExtension(file);
        int dash = backup.LastIndexOf('-');
        if (dash >= 0)
        {
            backup = backup.Remove(dash);
        }
        if (backup != currentBackup)
        {
            // A new group starts: prune the previous group's oldest files.
            PurgeBackupGroup(logsForBackup);
            logsForBackup.Clear();
            currentBackup = backup;
        }
        logsForBackup.Add(file);
    }
    // And the final group.
    PurgeBackupGroup(logsForBackup);
}

// Deletes all but the newest Constants.LOGS_KEEP_NUM files of one backup group.
// `logsForBackup` is expected in ascending (oldest-first) order.
private static void PurgeBackupGroup(List<string> logsForBackup)
{
    if (logsForBackup.Count > Constants.LOGS_KEEP_NUM)
    {
        foreach (string fileToDelete in logsForBackup.GetRange(0, logsForBackup.Count - Constants.LOGS_KEEP_NUM))
        {
            File.Delete(fileToDelete);
        }
    }
}
// Public entry function. All evaluations start here.
// An empty expression evaluates to zero. An opening bracket starts a fresh
// accumulator for the bracketed term; otherwise the leading number is peeled
// off and evaluation continues with the remainder.
public static float evaluate(List<object> expression)
{
    if (expression.Count == 0)
        return 0.0f;

    // Everything after the first element.
    List<object> rest = expression.GetRange(1, expression.Count - 1);

    if (expression[0].GetType() == typeof(OpenBracket))
        return evaluate(new List<object>(), rest);

    return evaluate((float)expression[0], rest);
}
/// <summary>
/// Splits the shaders.
/// Scans the combined source for "#pragma vertex" / "#pragma fragment" markers,
/// slices the buffer into a vertex half and a fragment half, re-inserts any
/// "#version" line at the top of each half, resolves includes, and builds a Shader.
/// </summary>
/// <param name="shaderBuffer">The shader buffer.</param>
private Shader SplitShaders(List<string> shaderBuffer, ref string name)
{
    int vertexShaderOffset = 0, fragmentShaderOffset = 0, lineCount = 0;
    string version = "";
    foreach (string line in shaderBuffer)
    {
        if (line.Contains("#pragma vertex"))
        {
            vertexShaderOffset = lineCount + 1; // vertex source starts on the next line
        }
        else if (line.Contains("#pragma fragment"))
        {
            fragmentShaderOffset = lineCount + 1; // fragment source starts on the next line
        }
        else if (line.Contains("#version"))
        {
            version = line; // re-inserted at the top of both halves below
        }
        lineCount++;
    }
    // NOTE(review): assumes the vertex pragma appears BEFORE the fragment pragma;
    // if not (or if either pragma is missing), the GetRange count goes negative and
    // throws ArgumentOutOfRangeException — confirm input guarantees.
    List<string> vertexShaderBuffer = shaderBuffer.GetRange(vertexShaderOffset, fragmentShaderOffset - vertexShaderOffset);
    vertexShaderBuffer.Insert(0, version);
    List<string> fragmentShaderBuffer = shaderBuffer.GetRange(fragmentShaderOffset, lineCount - fragmentShaderOffset);
    fragmentShaderBuffer.Insert(0, version);
    string vertexShader, fragmentShader;
    ProcessIncludes(vertexShaderBuffer, out vertexShader);
    ProcessIncludes(fragmentShaderBuffer, out fragmentShader);
    return new Shader(ref vertexShader, ref fragmentShader, ref name);
}
// Lag-1 autocorrelation of the series: covariance of the series with itself
// shifted by one, normalized by the standard deviations of both slices.
public static double computeAutocorr(List<double> values)
{
    // Fewer than 3 samples: not enough data, treat as uncorrelated.
    if (values.Count < 3)
        return 0;

    int n = values.Count - 1;
    List<double> lagged = values.GetRange(0, n);   // x[0 .. n-2]
    List<double> current = values.GetRange(1, n);  // x[1 .. n-1]

    double covariance = Statistics.Covariance(lagged, current);
    double normalization = Statistics.StandardDeviation(lagged) * Statistics.StandardDeviation(current);
    return covariance / normalization;
}
// Splits a 48-byte payload into its two sections: 36 bytes of data followed
// by 12 bytes of rate-limit information. Shorter payloads are ignored.
public override void ParseData(List<byte> bytes)
{
    if (bytes.Count < 48)
        return;

    _data.SetBytes(bytes.GetRange(0, 36));
    _rateLimit.SetBytes(bytes.GetRange(36, 12));
}
// Return the float if the expression has been completely evaluated.
// Otherwise dispatch on the next operator's precedence class and keep going.
private static float evaluate(float f, List<object> tail)
{
    if (tail.Count == 0)
        return f;

    object nextOp = tail[0];
    List<object> rest = tail.GetRange(1, tail.Count - 1);

    if (nextOp.GetType().IsSubclassOf(typeof(ProportionalOp)))
        return evaluate(f, (ProportionalOp)nextOp, rest);

    // We still expect an operator, so it must be an incremental one.
    return evaluate(f, (IncrementalOp)nextOp, rest);
}
// If the end of an expression has been reached, resolve the pending
// addition/subtraction and return the result. If the next operator has equal
// precedence, resolve now and continue; if it is higher precedence
// (proportional), defer the pending operation and keep scanning.
private static float evaluate(float f1, IncrementalOp op, float f2, List<object> tail)
{
    if (tail.Count == 0)
        return MathResolver.resolve((MathOp)op, f1, f2);

    object nextOp = tail[0];
    List<object> rest = tail.GetRange(1, tail.Count - 1);

    if (nextOp.GetType().IsSubclassOf(typeof(ProportionalOp)))
        return evaluate(f1, op, f2, (ProportionalOp)nextOp, rest);

    // We still expect an operator, so it must be an incremental one:
    // equal precedence — resolve the pending op first.
    return evaluate(MathResolver.resolve((MathOp)op, f1, f2), (IncrementalOp)nextOp, rest);
}
// Regression test for List<T>.GetRange (Bridge issue 532): slices taken from the
// start, middle and end of a list must contain exactly the expected elements.
public static void TestUseCase()
{
    var list = new List<int>(new[] { 1, 2, 3, 4, 5, 6, 7, 8, 9 });

    int[] firstTwo = list.GetRange(0, 2).ToArray();
    Assert.AreDeepEqual(firstTwo, new[] { 1, 2 }, "Bridge532 (0, 2)");

    int[] middleTwo = list.GetRange(1, 2).ToArray();
    Assert.AreDeepEqual(middleTwo, new[] { 2, 3 }, "Bridge532 (1, 2)");

    int[] lastThree = list.GetRange(6, 3).ToArray();
    Assert.AreDeepEqual(lastThree, new[] { 7, 8, 9 }, "Bridge532 (6, 3)");
}
// Returns the elements of a SORTED (ascending) list that are on the requested
// side of `value` (smallerThan keeps the left slice, otherwise the right slice),
// locating the boundary via binary search.
// BUGFIX: an empty input list used to throw ArgumentOutOfRangeException on the
// first list[middleIndex] access; nothing can match, so return an empty result.
public static List<float> BinaryFilter(List<float> list, CompareMethod compareMethod, float value)
{
    if (list.Count == 0)
    {
        return new List<float>();
    }
    int direction = (compareMethod == CompareMethod.smallerThan) ? -1 : 1;
    int middleIndex = 0, offset = 0;
    bool done = false;
    int firstIndex = 0;
    int lastIndex = list.Count - 1;
    // Binary search for `value`. When the value itself is absent, `offset` records
    // on which side of the final probe the search terminated (+1: probe above value,
    // -1: probe below value).
    while (!done)
    {
        middleIndex = GetMiddleIndex(firstIndex, lastIndex);
        float indexVal = list[middleIndex];
        if (indexVal == value)
        {
            done = true;
        }
        else if (indexVal > value)
        {
            if (lastIndex - firstIndex <= 1)
            {
                done = true;
                offset = 1;
            }
            else
            {
                lastIndex = middleIndex;
            }
        }
        else if (indexVal < value)
        {
            if (lastIndex - firstIndex <= 1)
            {
                done = true;
                offset = -1;
            }
            else
            {
                firstIndex = middleIndex;
            }
        }
    }
    // Slice the list on the requested side of the boundary, adjusting for which
    // side of `value` the final probe landed on.
    if (direction == -1 && (offset <= 0))
        return list.GetRange(0, middleIndex + 1);
    else if (direction == 1 && (offset >= 0))
        return list.GetRange(middleIndex, list.Count - middleIndex);
    else if (direction == 1 && offset == -1)
        return list.GetRange(middleIndex + 1, list.Count - (middleIndex + 1));
    else if (direction == -1 && offset == 1)
        return list.GetRange(0, middleIndex);
    return null; // unreachable: every direction/offset combination is handled above
}
// Wraps a function that takes a fixed set of arguments followed by a trailing
// "params"-style variable argument list: the first `numArgs` readers are the
// fixed arguments, the remainder become the Params array.
internal ParamsFunctionReader(int numArgs, List<ValueReader> args, Type ret, Func<ValueReader[], ValueReader[], object> function)
    : base(args, ret)
{
    Args = args.GetRange(0, numArgs).ToArray();                      // fixed arguments
    Params = args.GetRange(numArgs, args.Count - numArgs).ToArray(); // variadic tail
    Function = function;
    // Scratch buffer sized to the variadic tail.
    // NOTE(review): purpose inferred from the name (reused copy of Params at call
    // time) — confirm at the call site.
    ParamCopy = new ValueReader[Params.Length];
}
// Decodes a 29-byte payload: 12 bytes of position, 16 bytes of attitude,
// and one boolean flag at offset 28. Shorter payloads are ignored.
public void SetBytes(List<byte> bytes)
{
    if (bytes.Count < 29)
        return;

    _position.SetBytes(bytes.GetRange(0, 12));
    _attitude.SetBytes(bytes.GetRange(12, 16));
    _ccw = BitConverter.ToBoolean(bytes.ToArray(), 28);
}
// Entry point: builds a small sample school object graph (disciplines, teachers,
// students, classes), prints it, and exercises the optional free-comments feature.
static void Main()
{
    #region CREATE TEST OBJ
    //discipline
    List<Disciplines> disciplines = new List<Disciplines>();
    disciplines.Add(new Disciplines("Chemistry", 4, 6));
    disciplines.Add(new Disciplines("Math", 10, 10));
    disciplines.Add(new Disciplines("Biology", 8, 6));
    disciplines.Add(new Disciplines("Insurance", 10, 6));
    disciplines.Add(new Disciplines("Informatics", 10, 16));
    //teachers — each teacher is assigned a slice of the discipline list
    List<Teacher> teachers = new List<Teacher>();
    teachers.Add(new Teacher("Manolov", disciplines.GetRange(3, 1)));
    teachers.Add(new Teacher("Minkov", disciplines.GetRange(0, 2)));
    teachers.Add(new Teacher("Marinov", disciplines.GetRange(2, 1)));
    teachers.Add(new Teacher("Ovcharov", disciplines.GetRange(0, 3)));
    //students
    List<Student> students = new List<Student>();
    students.Add(new Student("Martin", 3));
    students.Add(new Student("Darin", 13));
    students.Add(new Student("Rumqna", 6));
    students.Add(new Student("Emil", 33));
    students.Add(new Student("Nikola", 7));
    students.Add(new Student("Georgi", 1));
    //SchoolClasses
    List<SchoolClass> schoolClasses = new List<SchoolClass>();
    schoolClasses.Add(new SchoolClass(teachers, students, "3133"));
    //school
    School school = new School("School Akademy", schoolClasses);
    #endregion
    //-----TEST SCHOOL-------
    // Relies on School.ToString() to render the whole object graph.
    Console.WriteLine(school);
    #region TEST OPTIONAL COMMENTS
    // Each entity type supports an optional free-text comment.
    Student vasko = new Student("Vasko", 3);
    vasko.FreeComments = "OPTIONAL COMMENT TEST";
    vasko.ShowFreeComments();
    Teacher ra = new Teacher("Vasko", disciplines);
    ra.FreeComments = "OPTIONAL COMMENT TEST";
    ra.ShowFreeComments();
    SchoolClass da = new SchoolClass(teachers, students, "31231");
    da.FreeComments = "OPTIONAL COMMENT TEST";
    da.ShowFreeComments();
    #endregion
}
// Recursively halves the point list until every subset holds at most 3 points,
// appending each leaf subset to `subsets`.
void Divide(List<Vector3> toDivide, List<List<Vector3>> subsets)
{
    // Recursion bottoms out at 3 or fewer points: store this chunk as one subset.
    if (toDivide.Count <= 3)
    {
        subsets.Add(toDivide);
        return;
    }

    int mid = toDivide.Count / 2;
    Divide(toDivide.GetRange(0, mid), subsets);
    Divide(toDivide.GetRange(mid, toDivide.Count - mid), subsets);
}
// Interquartile range: median of the upper half minus median of the lower half
// (exclusive method — the overall median is excluded from both halves for odd counts).
// BUGFIX: the original sorted the CALLER'S list in place — a surprising side
// effect for a pure statistic; sort a private copy instead.
public static double computeIQR(List<double> values)
{
    List<double> sorted = new List<double>(values);
    sorted.Sort();
    // Each half holds floor(n/2) elements.
    int chunkLength = sorted.Count / 2;
    List<double> firstHalf = sorted.GetRange(0, chunkLength);
    List<double> secondHalf = sorted.GetRange(sorted.Count - chunkLength, chunkLength);
    return Statistics.Median(secondHalf) - Statistics.Median(firstHalf);
}
// Decodes a 40-byte IMU payload: three 12-byte sensor blocks (accelerometer,
// gyro, magnetometer) followed by a 4-byte pressure value. Shorter payloads
// are ignored.
public void SetBytes(List<byte> bytes)
{
    if (bytes.Count < 40)
        return;

    Accelerometer.SetBytes(bytes.GetRange(0, 12));
    Gyro.SetBytes(bytes.GetRange(12, 12));
    Magnometer.SetBytes(bytes.GetRange(24, 12));
    _pressure = BitConverter.ToInt32(bytes.ToArray(), 36);
}
// Moves the first files of the source directory using two worker threads.
// BUGFIX: the hard-coded GetRange(0, 5) / GetRange(5, 5) threw ArgumentException
// whenever the directory held fewer than 10 files; the batch sizes are now
// clamped to what is actually available (preserving the original behavior of
// taking at most 5 files per thread).
static void Main(string[] args)
{
    List<String> files = new List<String>();
    files.AddRange(Directory.GetFiles(diretorioOrigem));

    int firstCount = Math.Min(5, files.Count);
    int secondCount = Math.Min(5, files.Count - firstCount);
    // Capture the batches in locals so the lambdas don't race on shared state.
    List<String> firstBatch = files.GetRange(0, firstCount);
    List<String> secondBatch = files.GetRange(firstCount, secondCount);

    Thread thread1 = new Thread(() => { Move(firstBatch); });
    thread1.Name = "thread1";
    thread1.Start();

    Thread thread2 = new Thread(() => { Move(secondBatch); });
    thread2.Name = "thread2";
    thread2.Start();
}
// Ramer–Douglas–Peucker simplification: keeps points whose orthogonal distance
// from the first-to-last segment is at least `tolerance`.
// BUGFIX: the left recursion used GetRange(0, index), which EXCLUDED the
// max-distance point from the left half; combined with the RemoveAt below, the
// vertex points[index - 1] was silently dropped from the output. The split point
// must appear in both halves, with the duplicate removed on join.
static List<Point> RecursiveSimplify(List<Point> points, double tolerance, bool isPolygon, bool isTopLevel)
{
    // Find the point farthest (orthogonally) from the segment last->first.
    double maxOrthoDistance = 0;
    int index = 0;
    for (int ii = 1; ii < points.Count - 1; ii++)
    {
        double orthoDistance = points[ii].Distance(points[points.Count - 1], points[0]);
        if (orthoDistance > maxOrthoDistance)
        {
            maxOrthoDistance = orthoDistance;
            index = ii;
        }
    }
    if (maxOrthoDistance >= tolerance)
    {
        // Split AT the farthest point: it ends the left half and starts the right half.
        List<Point> pts1 = RecursiveSimplify(points.GetRange(0, index + 1), tolerance, isPolygon, false);
        List<Point> pts2 = RecursiveSimplify(points.GetRange(index, points.Count - index), tolerance, isPolygon, false);
        pts1.RemoveAt(pts1.Count - 1); // drop the duplicated split point
        pts1.AddRange(pts2);
        return pts1;
    }
    else
    {
        // Nothing significant between the endpoints. For a top-level polygon keep
        // the farthest interior point too, so the result isn't degenerate.
        List<Point> result = new List<Point>();
        result.Add(points[0]);
        if (isTopLevel && isPolygon)
        {
            result.Add(points[index]);
        }
        result.Add(points[points.Count - 1]);
        return result;
    }
}
// Rebuilds the message list view from the current filter controls, preserving
// the view's state across the refresh.
private void RefreshListView()
{
    // Capture the current view state so it can be restored after repopulating.
    ListViewState state = GetListViewState(listViewMessages);
    // Build the selector data from the filter controls; unchecked date filters pass null.
    SMSSelectorData data = new SMSSelectorData(textBoxMessageContains.Text, comboBoxPhone.Text);
    data.ReceivedFrom = checkBoxReceivedAfter.Checked ? (DateTime?)dateTimePickerReceivedAfter.Value : null;
    data.ReceivedTo = checkBoxReceivedBefore.Checked ? (DateTime?)dateTimePickerReceivedBefore.Value : null;
    // Choose AND vs OR combination of the individual selectors.
    if (radioButtonAnd.Checked)
    {
        compositeSelector.SelectorBooleanFunction = SMSSelectorComposite.SelectorBoolFunc.And;
    }
    else
    {
        compositeSelector.SelectorBooleanFunction = SMSSelectorComposite.SelectorBoolFunc.Or;
    }
    // Filter may return null (propagated via ?.); cap the displayed rows at MAXIMUM_OUTPUT.
    List<Message> filtered = filter.Filter(messageHistoryCopy, data)?.ToList();
    IEnumerable<Message> displayed = filtered?.GetRange(0, Math.Min(filtered.Count, MAXIMUM_OUTPUT));
    ShowMessages(displayed);
    RestoreState(listViewMessages, state);
}
// Sends a VK message to `userId`, optionally attaching a photo and a keyboard.
// At most 36 keyboard labels are used (presumably the VK keyboard button limit —
// confirm against the API docs); extras are silently truncated.
public void SendMessage(long userId, string message = "", long? photoId = null, List<string> keyboardLabels = null)
{
    List<string> labels = null;
    if (keyboardLabels != null)
    {
        int take = keyboardLabels.Count < 36 ? keyboardLabels.Count : 36;
        labels = keyboardLabels.GetRange(0, take);
    }

    var keyboard = CreateKeyboard(labels);
    _api.Messages.Send(new MessagesSendParams
    {
        UserId = userId,
        Message = message,
        Keyboard = keyboard,
        Attachments = _vkGroupPhotoService.Get(photoId)
    });
}
// Repairs an item layout that cannot be split cleanly: sweeps the items'
// horizontal bounds to find the best vertical cut line, resizes items that
// straddle the cut, then recursively splits/fixes the two resulting groups.
// NOTE(review): this reads like decompiler output (num1/num2/x naming); the
// intent annotations below are inferred from the code and should be verified.
private void FixInvalidLayout(LayoutTreeNode node, List<LayoutControlItemBase> items)
{
    // Build a sweep list of left (start) and right (end) edges for every item.
    List<LayoutTree.BoundsEndpoint> boundsEndpointList = new List<LayoutTree.BoundsEndpoint>();
    foreach (LayoutControlItemBase layoutControlItemBase in items)
    {
        boundsEndpointList.Add(new LayoutTree.BoundsEndpoint(layoutControlItemBase.Bounds.Left, false, layoutControlItemBase));
        boundsEndpointList.Add(new LayoutTree.BoundsEndpoint(layoutControlItemBase.Bounds.Right, true, layoutControlItemBase));
    }
    boundsEndpointList.Sort(new Comparison<LayoutTree.BoundsEndpoint>(this.CompareEndpoints));
    // Sweep left-to-right. `dictionary` holds the items currently "open" at the
    // sweep position; layoutControlItemBaseList1 accumulates items in closing order.
    // num1 = item count of the best cut found so far, num2 = open-item count at that
    // cut (fewer open items = cleaner cut), x = the cut's horizontal position.
    List<LayoutControlItemBase> layoutControlItemBaseList1 = new List<LayoutControlItemBase>();
    Dictionary<LayoutControlItemBase, object> dictionary = new Dictionary<LayoutControlItemBase, object>();
    int num1 = -1;
    int num2 = int.MaxValue;
    int x = 0;
    for (int index = 0; index < boundsEndpointList.Count; ++index)
    {
        if (!boundsEndpointList[index].IsEnd)
        {
            dictionary.Add(boundsEndpointList[index].Item, (object)null);
        }
        else
        {
            dictionary.Remove(boundsEndpointList[index].Item);
            layoutControlItemBaseList1.Add(boundsEndpointList[index].Item);
            // Prefer cuts with fewer straddling items; break ties by how close the
            // cut splits the items into two equal halves.
            if (dictionary.Count <= num2 && x != boundsEndpointList[index].Location && layoutControlItemBaseList1.Count < items.Count && (dictionary.Count < num2 || (double)Math.Abs((float)num1 - (float)items.Count / 2f) > (double)Math.Abs((float)layoutControlItemBaseList1.Count - (float)items.Count / 2f)))
            {
                num1 = layoutControlItemBaseList1.Count;
                num2 = dictionary.Count;
                x = boundsEndpointList[index].Location;
            }
        }
    }
    if (num1 == -1)
    {
        throw new InvalidOperationException("Items' layout is invalid");
    }
    // If the chosen cut sits on the node's right edge, pull it inward (at least
    // 30px, or to the node's horizontal center).
    if ((double)x == (double)node.Bounds.Right)
    {
        x = Math.Max(x - 30, (int)(((double)node.Bounds.Left + (double)node.Bounds.Right) / 2.0));
    }
    // Partition items at the cut: range1 left of the cut, range2 right of it.
    List<LayoutControlItemBase> range1 = layoutControlItemBaseList1.GetRange(0, num1);
    List<LayoutControlItemBase> range2 = layoutControlItemBaseList1.GetRange(num1, layoutControlItemBaseList1.Count - num1);
    // Collect right-side items that still start left of the cut (straddlers).
    List<LayoutControlItemBase> layoutControlItemBaseList2 = new List<LayoutControlItemBase>();
    foreach (LayoutControlItemBase layoutControlItemBase in range2)
    {
        if (layoutControlItemBase.Bounds.X < x)
        {
            layoutControlItemBaseList2.Add(layoutControlItemBase);
        }
    }
    // Resolve each straddler: either clip it to end at the cut and move it to the
    // left group (when most of it lies left of the cut), or clip its left edge to
    // start at the cut. (Safe to mutate range2 here — the loop iterates the
    // separate layoutControlItemBaseList2 list.)
    foreach (LayoutControlItemBase layoutControlItemBase in layoutControlItemBaseList2)
    {
        if (Math.Abs(layoutControlItemBase.Bounds.X - x) > Math.Abs(layoutControlItemBase.Bounds.Right - x) && range2.Count > 1)
        {
            layoutControlItemBase.Bounds = new Rectangle(layoutControlItemBase.Bounds.Location, new Size(Math.Max(30, x - layoutControlItemBase.Bounds.X), layoutControlItemBase.Bounds.Height));
            range1.Add(layoutControlItemBase);
            range2.Remove(layoutControlItemBase);
        }
        else
        {
            layoutControlItemBase.Bounds = new Rectangle(new Point(x, layoutControlItemBase.Bounds.Y), new Size(Math.Max(30, layoutControlItemBase.Bounds.Right - x), layoutControlItemBase.Bounds.Height));
        }
    }
    // Record the split and create child nodes for the two halves.
    node.SplitType = Orientation.Horizontal;
    node.SplitPosition = (float)range2[0].Bounds.Left;
    LayoutTreeNode node1 = new LayoutTreeNode();
    node1.Parent = node;
    node1.Bounds = this.CalculateSplitBounds(node, true);
    LayoutTreeNode node2 = new LayoutTreeNode();
    node2.Parent = node;
    node2.Bounds = this.CalculateSplitBounds(node, false);
    node.Left = node1;
    node.Right = node2;
    // Try to split each half normally; fall back to recursive repair on failure.
    if (!this.TrySplit(node1, range1, true) && !this.TrySplit(node1, range1, false))
    {
        this.FixInvalidLayout(node1, range1);
    }
    if (this.TrySplit(node2, range2, true) || this.TrySplit(node2, range2, false))
    {
        return;
    }
    this.FixInvalidLayout(node2, range2);
}
/// <summary>
/// Converts a Babylon mesh into a glTF mesh: registers the mesh on <paramref name="gltf"/>,
/// creates one glTF primitive per Babylon sub-mesh, resolves materials (including
/// multi-materials), and writes indices/positions/tangents/normals/colors/UVs/bone
/// data into glTF accessors. Coordinate system is switched to right-handed (Z and
/// tangent W negated) and UV origin is flipped from lower-left to upper-left.
/// Returns null for dummy meshes (no positions).
/// </summary>
private GLTFMesh ExportMesh(BabylonMesh babylonMesh, GLTF gltf, BabylonScene babylonScene)
{
    RaiseMessage("GLTFExporter.Mesh | Export mesh named: " + babylonMesh.name, 1);

    // --------------------------
    // --- Mesh from babylon ----
    // --------------------------

    if (babylonMesh.positions == null || babylonMesh.positions.Length == 0)
    {
        RaiseMessage("GLTFExporter.Mesh | Mesh is a dummy", 2);
        return (null);
    }

    RaiseMessage("GLTFExporter.Mesh | Mesh from babylon", 2);
    // Retreive general data from babylon mesh
    int nbVertices = babylonMesh.positions.Length / 3; // positions are a flat xyz array

    bool hasUV = babylonMesh.uvs != null && babylonMesh.uvs.Length > 0;
    bool hasUV2 = babylonMesh.uvs2 != null && babylonMesh.uvs2.Length > 0;
    bool hasColor = babylonMesh.colors != null && babylonMesh.colors.Length > 0;
    bool hasBones = babylonMesh.matricesIndices != null && babylonMesh.matricesIndices.Length > 0;
    bool hasBonesExtra = babylonMesh.matricesIndicesExtra != null && babylonMesh.matricesIndicesExtra.Length > 0;
    bool hasTangents = babylonMesh.tangents != null && babylonMesh.tangents.Length > 0;

    RaiseMessage("GLTFExporter.Mesh | nbVertices=" + nbVertices, 3);
    RaiseMessage("GLTFExporter.Mesh | hasUV=" + hasUV, 3);
    RaiseMessage("GLTFExporter.Mesh | hasUV2=" + hasUV2, 3);
    RaiseMessage("GLTFExporter.Mesh | hasColor=" + hasColor, 3);
    RaiseMessage("GLTFExporter.Mesh | hasBones=" + hasBones, 3);
    RaiseMessage("GLTFExporter.Mesh | hasBonesExtra=" + hasBonesExtra, 3);

    // Retreive vertices data from babylon mesh
    List<GLTFGlobalVertex> globalVertices = new List<GLTFGlobalVertex>();
    for (int indexVertex = 0; indexVertex < nbVertices; indexVertex++)
    {
        GLTFGlobalVertex globalVertex = new GLTFGlobalVertex();
        globalVertex.Position = BabylonVector3.FromArray(babylonMesh.positions, indexVertex);
        globalVertex.Normal = BabylonVector3.FromArray(babylonMesh.normals, indexVertex);
        if (hasTangents)
        {
            globalVertex.Tangent = BabylonQuaternion.FromArray(babylonMesh.tangents, indexVertex);
            // Switch coordinate system at object level
            globalVertex.Tangent.Z *= -1;
            // Invert W to switch to right handed system
            globalVertex.Tangent.W *= -1;
        }

        // Switch coordinate system at object level
        globalVertex.Position.Z *= -1;
        globalVertex.Normal.Z *= -1;
        globalVertex.Position *= scaleFactor;

        if (hasUV)
        {
            globalVertex.UV = BabylonVector2.FromArray(babylonMesh.uvs, indexVertex);
            // For glTF, the origin of the UV coordinates (0, 0) corresponds to the upper left corner of a texture image
            // While for Babylon, it corresponds to the lower left corner of a texture image
            globalVertex.UV.Y = 1 - globalVertex.UV.Y;
        }
        if (hasUV2)
        {
            globalVertex.UV2 = BabylonVector2.FromArray(babylonMesh.uvs2, indexVertex);
            // For glTF, the origin of the UV coordinates (0, 0) corresponds to the upper left corner of a texture image
            // While for Babylon, it corresponds to the lower left corner of a texture image
            globalVertex.UV2.Y = 1 - globalVertex.UV2.Y;
        }
        if (hasColor)
        {
            globalVertex.Color = Tools.SubArrayFromEntity(babylonMesh.colors, indexVertex, 4);
        }
        if (hasBones)
        {
            // In babylon, the 4 bones indices are stored in a single int
            // Each bone index is 8-bit offset from the next
            int bonesIndicesMerged = babylonMesh.matricesIndices[indexVertex];
            int bone3 = bonesIndicesMerged >> 24;
            bonesIndicesMerged -= bone3 << 24;
            int bone2 = bonesIndicesMerged >> 16;
            bonesIndicesMerged -= bone2 << 16;
            int bone1 = bonesIndicesMerged >> 8;
            bonesIndicesMerged -= bone1 << 8;
            int bone0 = bonesIndicesMerged >> 0;
            bonesIndicesMerged -= bone0 << 0;
            var bonesIndicesArray = new ushort[] { (ushort)bone0, (ushort)bone1, (ushort)bone2, (ushort)bone3 };
            globalVertex.BonesIndices = bonesIndicesArray;
            globalVertex.BonesWeights = Tools.SubArrayFromEntity(babylonMesh.matricesWeights, indexVertex, 4);
        }

        globalVertices.Add(globalVertex);
    }

    var babylonMorphTargetManager = GetBabylonMorphTargetManager(babylonScene, babylonMesh);

    // Retreive indices from babylon mesh
    List<int> babylonIndices = babylonMesh.indices.ToList();

    // --------------------------
    // ------- Init glTF --------
    // --------------------------

    RaiseMessage("GLTFExporter.Mesh | Init glTF", 2);
    // Mesh
    var gltfMesh = new GLTFMesh { name = babylonMesh.name };
    gltfMesh.index = gltf.MeshesList.Count;
    gltf.MeshesList.Add(gltfMesh);
    gltfMesh.idGroupInstance = babylonMesh.idGroupInstance;
    if (hasBones)
    {
        gltfMesh.idBabylonSkeleton = babylonMesh.skeletonId;
    }

    // --------------------------
    // ---- glTF primitives -----
    // --------------------------

    RaiseMessage("GLTFExporter.Mesh | glTF primitives", 2);
    var meshPrimitives = new List<GLTFMeshPrimitive>();
    foreach (BabylonSubMesh babylonSubMesh in babylonMesh.subMeshes)
    {
        // --------------------------
        // ------ SubMesh data ------
        // --------------------------

        List<GLTFGlobalVertex> globalVerticesSubMesh = globalVertices.GetRange(babylonSubMesh.verticesStart, babylonSubMesh.verticesCount);
        var gltfIndices = babylonIndices.GetRange(babylonSubMesh.indexStart, babylonSubMesh.indexCount);
        // In gltf, indices of each mesh primitive are 0-based (ie: min value is 0)
        // Thus, the gltf indices list is a concatenation of sub lists all 0-based
        // Example for 2 triangles, each being a submesh:
        // babylonIndices = {0,1,2, 3,4,5} gives as result gltfIndicies = {0,1,2, 0,1,2}
        var minIndiceValue = gltfIndices.Min(); // Should be equal to babylonSubMesh.indexStart
        for (int indexIndice = 0; indexIndice < gltfIndices.Count; indexIndice++)
        {
            gltfIndices[indexIndice] -= minIndiceValue;
        }

        // --------------------------
        // ----- Mesh primitive -----
        // --------------------------

        // MeshPrimitive
        var meshPrimitive = new GLTFMeshPrimitive { attributes = new Dictionary<string, int>() };
        meshPrimitives.Add(meshPrimitive);

        // Material
        if (babylonMesh.materialId != null)
        {
            RaiseMessage("GLTFExporter.Mesh | Material", 3);
            // Retreive the babylon material
            BabylonMaterial babylonMaterial;
            var babylonMaterialId = babylonMesh.materialId;
            // From multi materials first, if any
            // Loop recursively even though it shouldn't be a real use case
            var babylonMultiMaterials = new List<BabylonMultiMaterial>(babylonScene.multiMaterials);
            BabylonMultiMaterial babylonMultiMaterial;
            do
            {
                babylonMultiMaterial = babylonMultiMaterials.Find(_babylonMultiMaterial => _babylonMultiMaterial.id == babylonMaterialId);
                if (babylonMultiMaterial != null)
                {
                    babylonMaterialId = babylonMultiMaterial.materials[babylonSubMesh.materialIndex];
                }
            } while (babylonMultiMaterial != null);

            // Then from materials
            var babylonMaterials = new List<BabylonMaterial>(babylonScene.materials);
            babylonMaterial = babylonMaterials.Find(_babylonMaterial => _babylonMaterial.id == babylonMaterialId);

            // If babylon material was exported successfully
            if (babylonMaterial != null)
            {
                // Update primitive material index
                var indexMaterial = babylonMaterialsToExport.FindIndex(_babylonMaterial => _babylonMaterial == babylonMaterial);
                if (indexMaterial == -1)
                {
                    // Store material for exportation
                    indexMaterial = babylonMaterialsToExport.Count;
                    babylonMaterialsToExport.Add(babylonMaterial);
                }
                meshPrimitive.material = indexMaterial;
            }

            // TODO - Add and retreive info from babylon material
            meshPrimitive.mode = GLTFMeshPrimitive.FillMode.TRIANGLES;
        }

        // --------------------------
        // ------- Accessors --------
        // --------------------------

        RaiseMessage("GLTFExporter.Mesh | Geometry", 3);

        // Buffer
        var buffer = GLTFBufferService.Instance.GetBuffer(gltf);

        // --- Indices ---
        // 16-bit indices unless the vertex count requires 32-bit.
        var componentType = GLTFAccessor.ComponentType.UNSIGNED_SHORT;
        if (nbVertices >= 65536)
        {
            componentType = GLTFAccessor.ComponentType.UNSIGNED_INT;
        }
        var accessorIndices = GLTFBufferService.Instance.CreateAccessor(
            gltf,
            GLTFBufferService.Instance.GetBufferViewScalar(gltf, buffer),
            "accessorIndices",
            componentType,
            GLTFAccessor.TypeEnum.SCALAR
        );
        meshPrimitive.indices = accessorIndices.index;
        // Populate accessor
        if (componentType == GLTFAccessor.ComponentType.UNSIGNED_INT)
        {
            gltfIndices.ForEach(n => accessorIndices.bytesList.AddRange(BitConverter.GetBytes(n)));
        }
        else
        {
            var gltfIndicesShort = gltfIndices.ConvertAll(new Converter<int, ushort>(n => (ushort)n));
            gltfIndicesShort.ForEach(n => accessorIndices.bytesList.AddRange(BitConverter.GetBytes(n)));
        }
        accessorIndices.count = gltfIndices.Count;

        // --- Positions ---
        var accessorPositions = GLTFBufferService.Instance.CreateAccessor(
            gltf,
            GLTFBufferService.Instance.GetBufferViewFloatVec3(gltf, buffer),
            "accessorPositions",
            GLTFAccessor.ComponentType.FLOAT,
            GLTFAccessor.TypeEnum.VEC3
        );
        meshPrimitive.attributes.Add(GLTFMeshPrimitive.Attribute.POSITION.ToString(), accessorPositions.index);
        // Populate accessor
        accessorPositions.min = new float[] { float.MaxValue, float.MaxValue, float.MaxValue };
        accessorPositions.max = new float[] { float.MinValue, float.MinValue, float.MinValue };
        globalVerticesSubMesh.ForEach((globalVertex) =>
        {
            var positions = globalVertex.Position.ToArray();
            // Store values as bytes
            foreach (var position in positions)
            {
                accessorPositions.bytesList.AddRange(BitConverter.GetBytes(position));
            }
            // Update min and max values
            GLTFBufferService.UpdateMinMaxAccessor(accessorPositions, positions);
        });
        accessorPositions.count = globalVerticesSubMesh.Count;

        // --- Tangents ---
        if (hasTangents)
        {
            var accessorTangents = GLTFBufferService.Instance.CreateAccessor(
                gltf,
                GLTFBufferService.Instance.GetBufferViewFloatVec4(gltf, buffer),
                "accessorTangents",
                GLTFAccessor.ComponentType.FLOAT,
                GLTFAccessor.TypeEnum.VEC4
            );
            meshPrimitive.attributes.Add(GLTFMeshPrimitive.Attribute.TANGENT.ToString(), accessorTangents.index);
            // Populate accessor
            List<float> tangents = globalVerticesSubMesh.SelectMany(v => v.Tangent.ToArray()).ToList();
            tangents.ForEach(n => accessorTangents.bytesList.AddRange(BitConverter.GetBytes(n)));
            accessorTangents.count = globalVerticesSubMesh.Count;
        }

        // --- Normals ---
        var accessorNormals = GLTFBufferService.Instance.CreateAccessor(
            gltf,
            GLTFBufferService.Instance.GetBufferViewFloatVec3(gltf, buffer),
            "accessorNormals",
            GLTFAccessor.ComponentType.FLOAT,
            GLTFAccessor.TypeEnum.VEC3
        );
        meshPrimitive.attributes.Add(GLTFMeshPrimitive.Attribute.NORMAL.ToString(), accessorNormals.index);
        // Populate accessor
        List<float> normals = globalVerticesSubMesh.SelectMany(v => v.Normal.ToArray()).ToList();
        normals.ForEach(n => accessorNormals.bytesList.AddRange(BitConverter.GetBytes(n)));
        accessorNormals.count = globalVerticesSubMesh.Count;

        // --- Colors ---
        if (hasColor)
        {
            var accessorColors = GLTFBufferService.Instance.CreateAccessor(
                gltf,
                GLTFBufferService.Instance.GetBufferViewFloatVec4(gltf, buffer),
                "accessorColors",
                GLTFAccessor.ComponentType.FLOAT,
                GLTFAccessor.TypeEnum.VEC4
            );
            meshPrimitive.attributes.Add(GLTFMeshPrimitive.Attribute.COLOR_0.ToString(), accessorColors.index);
            // Populate accessor
            List<float> colors = globalVerticesSubMesh.SelectMany(v => new[] { v.Color[0], v.Color[1], v.Color[2], v.Color[3] }).ToList();
            colors.ForEach(n => accessorColors.bytesList.AddRange(BitConverter.GetBytes(n)));
            accessorColors.count = globalVerticesSubMesh.Count;
        }

        // --- UV ---
        if (hasUV)
        {
            var accessorUVs = GLTFBufferService.Instance.CreateAccessor(
                gltf,
                GLTFBufferService.Instance.GetBufferViewFloatVec2(gltf, buffer),
                "accessorUVs",
                GLTFAccessor.ComponentType.FLOAT,
                GLTFAccessor.TypeEnum.VEC2
            );
            meshPrimitive.attributes.Add(GLTFMeshPrimitive.Attribute.TEXCOORD_0.ToString(), accessorUVs.index);
            // Populate accessor
            List<float> uvs = globalVerticesSubMesh.SelectMany(v => v.UV.ToArray()).ToList();
            uvs.ForEach(n => accessorUVs.bytesList.AddRange(BitConverter.GetBytes(n)));
            accessorUVs.count = globalVerticesSubMesh.Count;
        }

        // --- UV2 ---
        if (hasUV2)
        {
            var accessorUV2s = GLTFBufferService.Instance.CreateAccessor(
                gltf,
                GLTFBufferService.Instance.GetBufferViewFloatVec2(gltf, buffer),
                "accessorUV2s",
                GLTFAccessor.ComponentType.FLOAT,
                GLTFAccessor.TypeEnum.VEC2
            );
            meshPrimitive.attributes.Add(GLTFMeshPrimitive.Attribute.TEXCOORD_1.ToString(), accessorUV2s.index);
            // Populate accessor
            List<float> uvs2 = globalVerticesSubMesh.SelectMany(v => v.UV2.ToArray()).ToList();
            uvs2.ForEach(n => accessorUV2s.bytesList.AddRange(BitConverter.GetBytes(n)));
            accessorUV2s.count = globalVerticesSubMesh.Count;
        }

        // --- Bones ---
        if (hasBones)
        {
            RaiseMessage("GLTFExporter.Mesh | Bones", 3);
            // --- Joints ---
            var accessorJoints = GLTFBufferService.Instance.CreateAccessor(
                gltf,
                GLTFBufferService.Instance.GetBufferViewUnsignedShortVec4(gltf, buffer),
                "accessorJoints",
                GLTFAccessor.ComponentType.UNSIGNED_SHORT,
                GLTFAccessor.TypeEnum.VEC4
            );
            meshPrimitive.attributes.Add(GLTFMeshPrimitive.Attribute.JOINTS_0.ToString(), accessorJoints.index);
            // Populate accessor
            List<ushort> joints = globalVerticesSubMesh.SelectMany(v => new[] { v.BonesIndices[0], v.BonesIndices[1], v.BonesIndices[2], v.BonesIndices[3] }).ToList();
            joints.ForEach(n => accessorJoints.bytesList.AddRange(BitConverter.GetBytes(n)));
            accessorJoints.count = globalVerticesSubMesh.Count;

            // --- Weights ---
            var accessorWeights = GLTFBufferService.Instance.CreateAccessor(
                gltf,
                GLTFBufferService.Instance.GetBufferViewFloatVec4(gltf, buffer),
                "accessorWeights",
                GLTFAccessor.ComponentType.FLOAT,
                GLTFAccessor.TypeEnum.VEC4
            );
            meshPrimitive.attributes.Add(GLTFMeshPrimitive.Attribute.WEIGHTS_0.ToString(), accessorWeights.index);
            // Populate accessor
            List<float> weightBones = globalVerticesSubMesh.SelectMany(v => new[] { v.BonesWeights[0], v.BonesWeights[1], v.BonesWeights[2], v.BonesWeights[3] }).ToList();
            weightBones.ForEach(n => accessorWeights.bytesList.AddRange(BitConverter.GetBytes(n)));
            accessorWeights.count = globalVerticesSubMesh.Count;
        }

        // Morph targets positions and normals
        if (babylonMorphTargetManager != null)
        {
            RaiseMessage("GLTFExporter.Mesh | Morph targets", 3);
            _exportMorphTargets(babylonMesh, babylonSubMesh, babylonMorphTargetManager, gltf, buffer, meshPrimitive);
        }
    }
    gltfMesh.primitives = meshPrimitives.ToArray();

    // Morph targets weights
    if (babylonMorphTargetManager != null)
    {
        var weights = new List<float>();
        foreach (BabylonMorphTarget babylonMorphTarget in babylonMorphTargetManager.targets)
        {
            weights.Add(babylonMorphTarget.influence);
        }
        gltfMesh.weights = weights.ToArray();
    }
    return (gltfMesh);
}
// Attempts to split 'items' into two groups separated by a clean gap along the
// chosen axis (vertical=true splits on Top/Bottom, otherwise on Left/Right).
// On success, fills 'node' with a split and recursively lays out both halves;
// returns false when no gap-free split point exists.
private bool TrySplit(LayoutTreeNode node, List<LayoutControlItemBase> items, bool vertical)
{
    // Single item: this node becomes a leaf.
    if (items.Count == 1)
    {
        node.Item = items[0];
        return (true);
    }
    // Build a sorted list of every item's start/end edge along the split axis.
    List<LayoutTree.BoundsEndpoint> boundsEndpointList = new List<LayoutTree.BoundsEndpoint>();
    foreach (LayoutControlItemBase layoutControlItemBase in items)
    {
        boundsEndpointList.Add(new LayoutTree.BoundsEndpoint(vertical ? layoutControlItemBase.Bounds.Top : layoutControlItemBase.Bounds.Left, false, layoutControlItemBase));
        boundsEndpointList.Add(new LayoutTree.BoundsEndpoint(vertical ? layoutControlItemBase.Bounds.Bottom : layoutControlItemBase.Bounds.Right, true, layoutControlItemBase));
    }
    boundsEndpointList.Sort(new Comparison<LayoutTree.BoundsEndpoint>(this.CompareEndpoints));
    List<LayoutControlItemBase> layoutControlItemBaseList = new List<LayoutControlItemBase>();
    Dictionary<LayoutControlItemBase, object> dictionary = new Dictionary<LayoutControlItemBase, object>();
    int num = -1; // chosen split index into layoutControlItemBaseList; -1 = none found
    // Sweep the endpoints; 'dictionary' holds the items currently open.
    for (int index = 0; index < boundsEndpointList.Count; ++index)
    {
        if (!boundsEndpointList[index].IsEnd)
        {
            dictionary.Add(boundsEndpointList[index].Item, (object)null);
        }
        else
        {
            dictionary.Remove(boundsEndpointList[index].Item);
            layoutControlItemBaseList.Add(boundsEndpointList[index].Item);
            // Valid split point: no item straddles here and at least one item remains
            // on the far side. Prefer the candidate closest to half of the items.
            if (dictionary.Count == 0 && layoutControlItemBaseList.Count < items.Count && (num == -1 || (double)Math.Abs((float)num - (float)items.Count / 2f) > (double)Math.Abs((float)layoutControlItemBaseList.Count - (float)items.Count / 2f)))
            {
                num = layoutControlItemBaseList.Count;
            }
        }
    }
    if (num == -1)
    {
        return (false);
    }
    List<LayoutControlItemBase> range1 = layoutControlItemBaseList.GetRange(0, num);
    List<LayoutControlItemBase> range2 = layoutControlItemBaseList.GetRange(num, layoutControlItemBaseList.Count - num);
    node.SplitType = vertical ? Orientation.Vertical : Orientation.Horizontal;
    node.SplitPosition = vertical ? (float)range2[0].Bounds.Top : (float)range2[0].Bounds.Left;
    // Build the two child nodes and recurse; fall back to FixInvalidLayout when
    // neither orientation can split a half.
    LayoutTreeNode node1 = new LayoutTreeNode();
    node1.Parent = node;
    node1.Bounds = this.CalculateSplitBounds(node, true);
    LayoutTreeNode node2 = new LayoutTreeNode();
    node2.Parent = node;
    node2.Bounds = this.CalculateSplitBounds(node, false);
    node.Left = node1;
    node.Right = node2;
    if (!this.TrySplit(node1, range1, true) && !this.TrySplit(node1, range1, false))
    {
        this.FixInvalidLayout(node1, range1);
    }
    if (!this.TrySplit(node2, range2, true) && !this.TrySplit(node2, range2, false))
    {
        this.FixInvalidLayout(node2, range2);
    }
    return (true);
}
///<summary>Fills grid based on values in _listEtrans.
///Set isRefreshNeeded to true when we need to reinitialize local dictionarys after in memory list is also updated. Required true for first time running.
///Also allows you to passed in predetermined filter options.</summary>
private void FillGrid(bool isRefreshNeeded, List<string> listSelectedStatuses, List<long> listSelectedClinicNums, string carrierName, string checkTraceNum, string amountMin, string amountMax)
{
    Cursor = Cursors.WaitCursor;
    Action actionCloseProgress = null;
    if (isRefreshNeeded)
    {
        // Rebuild the cached 835 / claim dictionaries from the database.
        actionCloseProgress = ODProgressOld.ShowProgressStatus("Etrans835", this, Lan.g(this, "Gathering data") + "...", false);
        _dictEtrans835s.Clear();
        _dictEtransClaims.Clear();
        _dictClaimPayExists.Clear();
        List<Etrans835Attach> listAttached = Etrans835Attaches.GetForEtrans(_listEtranss.Select(x => x.EtransNum).ToArray());
        Dictionary<long, string> dictEtransMessages = new Dictionary<long, string>();
        List<X12ClaimMatch> list835ClaimMatches = new List<X12ClaimMatch>();
        Dictionary<long, int> dictClaimMatchCount = new Dictionary<long, int>(); //1:1 with _listEtranss. Stores how many claim matches each 835 has.
        int batchQueryInterval = 500; //Every 500 rows we get the next 500 message texts to save memory.
        int rowCur = 0;
        foreach (Etrans etrans in _listEtranss)
        {
            if (rowCur % batchQueryInterval == 0)
            {
                int range = Math.Min(batchQueryInterval, _listEtranss.Count - rowCur); //Either the full batchQueryInterval amount or the remaining amount of etrans.
                dictEtransMessages = EtransMessageTexts.GetMessageTexts(_listEtranss.GetRange(rowCur, range).Select(x => x.EtransMessageTextNum).ToList(), false);
            }
            rowCur++;
            ODEvent.Fire(new ODEventArgs("Etrans835", Lan.g(this, "Processing 835: ") + ": " + rowCur + " out of " + _listEtranss.Count));
            // Parse the 835 message text for this etrans along with its attaches.
            List<Etrans835Attach> listAttachedTo835 = listAttached.FindAll(x => x.EtransNum == etrans.EtransNum);
            X835 x835 = new X835(etrans, dictEtransMessages[etrans.EtransMessageTextNum], etrans.TranSetId835, listAttachedTo835, true);
            _dictEtrans835s.Add(etrans.EtransNum, x835);
            List<X12ClaimMatch> listClaimMatches = x835.GetClaimMatches();
            dictClaimMatchCount.Add(etrans.EtransNum, listClaimMatches.Count);
            list835ClaimMatches.AddRange(listClaimMatches);
        }
        #region Set 835 unattached in batch and build _dictEtransClaims and _dictClaimPayCheckNums.
        ODEvent.Fire(new ODEventArgs("Etrans835", Lan.g(this, "Gathering internal claim matches.")));
        List<long> listClaimNums = Claims.GetClaimFromX12(list835ClaimMatches); //Can return null.
        ODEvent.Fire(new ODEventArgs("Etrans835", Lan.g(this, "Building data sets.")));
        int claimIndexCur = 0; // running offset into listClaimNums, advanced per 835 by its match count
        List<long> listMatchedClaimNums = new List<long>();
        foreach (Etrans etrans in _listEtranss)
        {
            X835 x835 = _dictEtrans835s[etrans.EtransNum];
            if (listClaimNums != null)
            {
                x835.SetClaimNumsForUnattached(listClaimNums.GetRange(claimIndexCur, dictClaimMatchCount[etrans.EtransNum]));
            }
            claimIndexCur += dictClaimMatchCount[etrans.EtransNum];
            listMatchedClaimNums.AddRange(x835.ListClaimsPaid.FindAll(x => x.ClaimNum != 0).Select(x => x.ClaimNum).ToList());
        }
        List<Claim> listClaims = Claims.GetClaimsFromClaimNums(listMatchedClaimNums.Distinct().ToList());
        _dictClaimPayExists = ClaimPayments.HasClaimPayment(listMatchedClaimNums); //Every claim num is associated to a bool. True when there is an existing claimPayment.
        foreach (Etrans etrans in _listEtranss)
        {
            X835 x835 = _dictEtrans835s[etrans.EtransNum];
            #region _dictEtransClaims, _dictClaimPayCheckNums
            _dictEtransClaims.Add(etrans.EtransNum, new List<Claim>());
            List<long> listSubClaimNums = x835.ListClaimsPaid.FindAll(x => x.ClaimNum != 0).Select(y => y.ClaimNum).ToList();
            List<Claim> listClaimsFor835 = listClaims.FindAll(x => listSubClaimNums.Contains(x.ClaimNum));
            foreach (Hx835_Claim claim in x835.ListClaimsPaid)
            {
                Claim claimCur = listClaimsFor835.FirstOrDefault(x => x.ClaimNum == claim.ClaimNum); //Can be null.
                if (claimCur == null && claim.IsAttachedToClaim && claim.ClaimNum == 0)
                {
                    claimCur = new Claim(); //Create empty claim since user detached claim manually, will not be considered in GetStringStatus(...).
                }
                if (claimCur != null && claim.IsPreauth) //User attached preauth to internal claim, no payment needed to be considered 'Finalized' in GetStringStatus(...).
                {
                    _dictClaimPayExists[claim.ClaimNum] = true;
                }
                _dictEtransClaims[etrans.EtransNum].Add(claimCur);
            }
            #endregion
        }
        ODEvent.Fire(new ODEventArgs("Etrans835", Lan.g(this, "Filling Grid.")));
        #endregion
    }
    gridMain.BeginUpdate();
    #region Initilize columns only once
    if (gridMain.Columns.Count == 0)
    {
        ODGridColumn col;
        col = new ODGridColumn(Lan.g("TableEtrans835s", "Patient Name"), 250);
        gridMain.Columns.Add(col);
        col = new ODGridColumn(Lan.g("TableEtrans835s", "Carrier Name"), 190);
        gridMain.Columns.Add(col);
        col = new ODGridColumn(Lan.g("TableEtrans835s", "Status"), 80);
        gridMain.Columns.Add(col);
        col = new ODGridColumn(Lan.g("TableEtrans835s", "Date"), 80);
        gridMain.Columns.Add(col);
        col = new ODGridColumn(Lan.g("TableEtrans835s", "Amount"), 80);
        gridMain.Columns.Add(col);
        if (PrefC.HasClinicsEnabled)
        {
            col = new ODGridColumn(Lan.g("TableEtrans835s", "Clinic"), 70);
            gridMain.Columns.Add(col);
        }
        col = new ODGridColumn(Lan.g("TableEtrans835s", "Code"), 37, HorizontalAlignment.Center);
        gridMain.Columns.Add(col);
        col = new ODGridColumn(Lan.g("TableEtrans835s", "Note"), 0);
        gridMain.Columns.Add(col);
    }
    #endregion
    gridMain.Rows.Clear();
    // One grid row per etrans that survives every active filter.
    foreach (Etrans etrans in _listEtranss)
    {
        X835 x835 = _dictEtrans835s[etrans.EtransNum];
        #region Filter: Carrier Name
        if (carrierName != "" && !x835.PayerName.ToLower().Contains(carrierName.ToLower()))
        {
            continue;
        }
        #endregion
        string status = GetStringStatus(etrans.EtransNum);
        #region Filter: Status
        if (!listSelectedStatuses.Contains(status.Replace("*", ""))) //The filter will ignore finalized with detached claims.
        {
            continue;
        }
        #endregion
        //List of ClinicNums for the current etrans.ListClaimsPaid from the DB.
        List<long> listClinicNums = _dictEtransClaims[etrans.EtransNum].Select(x => x == null ? 0 : x.ClinicNum).Distinct().ToList();
        #region Filter: Clinics
        if (PrefC.HasClinicsEnabled && !listClinicNums.Exists(x => listSelectedClinicNums.Contains(x)))
        {
            continue; //The ClinicNums associated to the 835 do not match any of the selected ClinicNums, so nothing to show in this 835.
        }
        #endregion
        #region Filter: Check and Trace Value
        if (checkTraceNum != "" && !x835.TransRefNum.Contains(checkTraceNum)) //Trace Number does not match
        {
            continue;
        }
        #endregion
        #region Filter: Insurance Check Range Min and Max
        if (amountMin != "" && x835.InsPaid < PIn.Decimal(amountMin) || amountMax != "" && x835.InsPaid > PIn.Decimal(amountMax))
        {
            continue; //Either the InsPaid is below or above our range.
        }
        #endregion
        ODGridRow row = new ODGridRow();
        #region Column: Patient Name
        List<string> listPatNames = x835.ListClaimsPaid.Select(x => x.PatientName.ToString()).Distinct().ToList();
        string patName = (listPatNames.Count > 0 ? listPatNames[0] : "");
        if (listPatNames.Count > 1)
        {
            // Multiple patients on one 835: show the count instead of a single name.
            patName = "(" + POut.Long(listPatNames.Count) + ")";
        }
        row.Cells.Add(patName);
        #endregion
        row.Cells.Add(x835.PayerName);
        row.Cells.Add(status); //See GetStringStatus(...) for possible values.
        row.Cells.Add(POut.Date(etrans.DateTimeTrans));
        row.Cells.Add(POut.Decimal(x835.InsPaid));
        #region Column: Clinic
        if (PrefC.HasClinicsEnabled)
        {
            string clinicAbbr = "";
            if (listClinicNums.Count == 1)
            {
                if (listClinicNums[0] == 0)
                {
                    clinicAbbr = Lan.g(this, "Unassigned");
                }
                else
                {
                    clinicAbbr = Clinics.GetAbbr(listClinicNums[0]);
                }
            }
            else if (listClinicNums.Count > 1)
            {
                clinicAbbr = "(" + Lan.g(this, "Multiple") + ")";
            }
            row.Cells.Add(clinicAbbr);
        }
        #endregion
        row.Cells.Add(x835._paymentMethodCode);
        row.Cells.Add(etrans.Note);
        row.Tag = etrans;
        gridMain.Rows.Add(row);
    }
    gridMain.EndUpdate();
    try
    {
        actionCloseProgress?.Invoke(); //When this function executes quickly this can fail rarely, fail silently because of WaitCursor.
    }
    catch (Exception ex)
    {
        ex.DoNothing();
    }
    Cursor = Cursors.Default;
}
// Builds a list of planets through a series of list operations, pairs each
// planet with the probes that visited it, and prints "Planet: probe, probe"
// for every planet that has at least one matching probe.
static void Main(string[] args)
{
    // Start with two planets, then grow the list step by step.
    var planets = new List<string> { "Mercury", "Mars" };
    planets.Add("Jupiter");
    planets.Add("Saturn");

    // The two outermost planets live in their own list for now.
    var outerPlanets = new List<string> { "Neptune", "Uranus" };

    // Slot Venus and Earth into their correct positions.
    planets.Insert(1, "Venus");
    planets.Insert(2, "Earth");

    // Append the outer planets, then Pluto at the very end.
    planets.AddRange(outerPlanets);
    planets.Add("Pluto");

    // The first four entries are the rocky (terrestrial) planets.
    List<string> rockyPlanets = planets.GetRange(0, 4);

    // Pluto (index 8) is not a planet after all.
    planets.RemoveAt(8);

    // Each probe dictionary maps a visited planet name to the probe's name.
    var probes = new List<Dictionary<string, string>>
    {
        new Dictionary<string, string> { { "Mars", "Viking" }, { "Jupiter", "Viking" } },
        new Dictionary<string, string> { { "Mars", "Opportunity" } },
        new Dictionary<string, string> { { "Neptune", "Trident" } },
    };

    foreach (string planet in planets)
    {
        // Gather every probe that recorded a visit to this planet.
        var matchingProbes = new List<string>();
        foreach (var probe in probes)
        {
            string probeName;
            if (probe.TryGetValue(planet, out probeName))
            {
                matchingProbes.Add(probeName);
            }
        }

        // Planets with no probes are not printed at all.
        if (matchingProbes.Count > 0)
        {
            Console.WriteLine($"{planet}: {string.Join(", ", matchingProbes)}");
        }
    }
}
public void playPile(List <Card> cards) { this.PlayPileTopCard = cards.GetRange(0, 1); cards.RemoveRange(0, 1); this.PlayPile = cards; }
// Rewrites the body of a compiler-generated iterator (yield) state machine's
// MoveNext into linear decompiled code: stores to the state field are tracked
// and turned into gotos, stores to the 'current' field become YieldReturn,
// returns/Dispose calls become YieldBreak, and calls to compiler-generated
// finally methods are re-wrapped as try/finally blocks around the instructions
// emitted since the matching state change. Any deviation from the expected
// compiler pattern raises SymbolicAnalysisFailedException.
void ConvertBody(List<ILNode> body, int startPos, int bodyLength, List<KeyValuePair<ILLabel, StateRange>> labels)
{
    newBody = new List<ILNode>();
    // Entry: dispatch to the code for the machine's current state.
    newBody.Add(MakeGoTo(labels, 0));
    List<SetState> stateChanges = new List<SetState>();
    int currentState = -1; // last constant assigned to the state field; -1 = unknown
    // Copy all instructions from the old body to newBody.
    for (int pos = startPos; pos < bodyLength; pos++)
    {
        ILExpression expr = body[pos] as ILExpression;
        if (expr != null && expr.Code == ILCode.Stfld && expr.Arguments[0].MatchThis())
        {
            // Handle stores to 'state' or 'current'
            if (GetFieldDefinition(expr.Operand as FieldReference) == stateField)
            {
                // State assignments must be integer constants.
                if (expr.Arguments[1].Code != ILCode.Ldc_I4)
                {
                    throw new SymbolicAnalysisFailedException();
                }
                currentState = (int)expr.Arguments[1].Operand;
                stateChanges.Add(new SetState(newBody.Count, currentState));
            }
            else if (GetFieldDefinition(expr.Operand as FieldReference) == currentField)
            {
                newBody.Add(new ILExpression(ILCode.YieldReturn, null, expr.Arguments[1]));
            }
            else
            {
                newBody.Add(body[pos]);
            }
        }
        else if (returnVariable != null && expr != null && expr.Code == ILCode.Stloc && expr.Operand == returnVariable)
        {
            // handle store+branch to the returnVariable
            // (note: ++pos consumes the branch instruction that must follow the store)
            ILExpression br = body.ElementAtOrDefault(++pos) as ILExpression;
            if (br == null || !(br.Code == ILCode.Br || br.Code == ILCode.Leave) || br.Operand != returnLabel || expr.Arguments[0].Code != ILCode.Ldc_I4)
            {
                throw new SymbolicAnalysisFailedException();
            }
            int val = (int)expr.Arguments[0].Operand;
            if (val == 0)
            {
                // returning false = iteration finished
                newBody.Add(new ILExpression(ILCode.YieldBreak, null));
            }
            else if (val == 1)
            {
                // returning true = resume at the state set just before
                newBody.Add(MakeGoTo(labels, currentState));
            }
            else
            {
                throw new SymbolicAnalysisFailedException();
            }
        }
        else if (expr != null && expr.Code == ILCode.Ret)
        {
            if (expr.Arguments.Count != 1 || expr.Arguments[0].Code != ILCode.Ldc_I4)
            {
                throw new SymbolicAnalysisFailedException();
            }
            // handle direct return (e.g. in release builds)
            int val = (int)expr.Arguments[0].Operand;
            if (val == 0)
            {
                newBody.Add(new ILExpression(ILCode.YieldBreak, null));
            }
            else if (val == 1)
            {
                newBody.Add(MakeGoTo(labels, currentState));
            }
            else
            {
                throw new SymbolicAnalysisFailedException();
            }
        }
        else if (expr != null && expr.Code == ILCode.Call && expr.Arguments.Count == 1 && expr.Arguments[0].MatchThis())
        {
            MethodDefinition method = GetMethodDefinition(expr.Operand as MethodReference);
            if (method == null)
            {
                throw new SymbolicAnalysisFailedException();
            }
            StateRange stateRange;
            if (method == disposeMethod)
            {
                // Explicit call to dispose is used for "yield break;" within the method.
                ILExpression br = body.ElementAtOrDefault(++pos) as ILExpression;
                if (br == null || !(br.Code == ILCode.Br || br.Code == ILCode.Leave) || br.Operand != returnFalseLabel)
                {
                    throw new SymbolicAnalysisFailedException();
                }
                newBody.Add(new ILExpression(ILCode.YieldBreak, null));
            }
            else if (finallyMethodToStateRange.TryGetValue(method, out stateRange))
            {
                // Call to Finally-method
                int index = stateChanges.FindIndex(ss => stateRange.Contains(ss.NewState));
                if (index < 0)
                {
                    throw new SymbolicAnalysisFailedException();
                }

                ILLabel label = new ILLabel();
                label.Name = "JumpOutOfTryFinally" + stateChanges[index].NewState;
                newBody.Add(new ILExpression(ILCode.Leave, label));

                SetState stateChange = stateChanges[index];
                // Move all instructions from stateChange.Pos to newBody.Count into a try-block
                stateChanges.RemoveRange(index, stateChanges.Count - index); // remove all state changes up to the one we found
                ILTryCatchBlock tryFinally = new ILTryCatchBlock();
                tryFinally.TryBlock = new ILBlock(newBody.GetRange(stateChange.NewBodyPos, newBody.Count - stateChange.NewBodyPos));
                newBody.RemoveRange(stateChange.NewBodyPos, newBody.Count - stateChange.NewBodyPos); // remove all nodes that we just moved into the try block
                tryFinally.CatchBlocks = new List<ILTryCatchBlock.CatchBlock>();
                tryFinally.FinallyBlock = ConvertFinallyBlock(method);
                newBody.Add(tryFinally);
                newBody.Add(label);
            }
        }
        else
        {
            newBody.Add(body[pos]);
        }
    }
    newBody.Add(new ILExpression(ILCode.YieldBreak, null));
}
public void GetRangeValidations(T[] items) { // //Always send items.Length is even // List <T> list = new List <T>(items); int[] bad = new int[] { /**/ items.Length, 1, /**/ items.Length + 1, 0, /**/ items.Length + 1, 1, /**/ items.Length, 2, /**/ items.Length / 2, items.Length / 2 + 1, /**/ items.Length - 1, 2, /**/ items.Length - 2, 3, /**/ 1, items.Length, /**/ 0, items.Length + 1, /**/ 1, items.Length + 1, /**/ 2, items.Length, /**/ items.Length / 2 + 1, items.Length / 2, /**/ 2, items.Length - 1, /**/ 3, items.Length - 2 }; for (int i = 0; i < bad.Length; i++) { AssertExtensions.Throws <ArgumentException>(null, () => list.GetRange(bad[i], bad[++i])); //"ArgumentException expected." } bad = new int[] { /**/ -1, -1, /**/ -1, 0, /**/ -1, 1, /**/ -1, 2, /**/ 0, -1, /**/ 1, -1, /**/ 2, -1 }; for (int i = 0; i < bad.Length; i++) { Assert.Throws <ArgumentOutOfRangeException>(() => list.GetRange(bad[i], bad[++i])); //"ArgumentOutOfRangeException expected." } }
/// <summary>
/// Returns the flop: the first three community cards on the board.
/// NOTE(review): assumes onBoardCards already holds at least three cards —
/// GetRange throws ArgumentException otherwise; confirm callers only invoke
/// this after the flop has been dealt.
/// </summary>
public List <Card> getFlop() { return(onBoardCards.GetRange(0, 3)); }
/// <summary>
/// Console dice-roller: rolls a six-sided die while the user keeps answering "1",
/// storing every roll in the outer 'rolls' list, then offers to show, total or
/// average the rolls.
/// </summary>
static void Main(string[] args)
{
    Console.WriteLine("Roll die?");
    Console.WriteLine("1.Yes");
    Console.WriteLine("2.No");
    string Ctn = Console.ReadLine();
    int YN;
    int.TryParse(Ctn, out YN);

    // One RNG for the whole session (re-creating it every iteration risks repeats).
    Random random = new System.Random();
    while (YN == 1)
    {
        // FIX: Random.Next's upper bound is exclusive — (1, 6) could never roll a 6.
        int RanNum = random.Next(1, 7);
        Console.WriteLine("Your roll is:");
        Console.WriteLine(RanNum);
        rolls.Add(RanNum);
        Console.WriteLine("Roll again?");
        Ctn = Console.ReadLine();
        int.TryParse(Ctn, out YN);
    }

    Console.WriteLine("Anything else?");
    Console.WriteLine("1.Show all rolls");
    Console.WriteLine("2.Show older rolls");
    Console.WriteLine("3.Add rolls together");
    Console.WriteLine("4.Show avarage of rolls");
    string FnlSlc = Console.ReadLine();
    int Fnl;
    int.TryParse(FnlSlc, out Fnl);

    if (Fnl == 1)
    {
        Console.WriteLine("Your rolls were:");
        foreach (int i in rolls)
        {
            Console.WriteLine(i);
        }
    }

    if (Fnl == 2)
    {
        Console.WriteLine("enter amount of rolls to find");
        string FndR = Console.ReadLine();
        int Fnd;
        int.TryParse(FndR, out Fnd);
        // FIX: GetRange returns a List<int> — the original assigned it to a string,
        // which does not compile, and never printed the rolls. Clamp the request so
        // an over-large or negative count cannot throw.
        List<int> oldRolls = rolls.GetRange(0, Math.Min(Math.Max(Fnd, 0), rolls.Count));
        Console.WriteLine("Those rolls were:");
        foreach (int i in oldRolls)
        {
            Console.WriteLine(i);
        }
    }

    if (Fnl == 3)
    {
        Console.WriteLine("Your roll total is:");
        int sum = 0;
        foreach (int i in rolls)
        {
            sum += i;
        }
        Console.WriteLine(sum);
    }

    if (Fnl == 4)
    {
        Console.WriteLine("Your roll average is:");
        int sum = 0;
        foreach (int i in rolls)
        {
            sum += i;
        }
        // FIX: average is total / number of rolls; the original divided by each
        // element's value inside the loop. Guard against zero rolls.
        int avg = rolls.Count > 0 ? sum / rolls.Count : 0;
        Console.WriteLine(avg);
    }
}
// Do the work of combining meshes
/// <summary>
/// Combines the meshes of the currently selected GameObjects into a single new
/// GameObject: meshes are bucketed per Material, welded into one submesh per
/// material, then merged into one uber mesh whose renderer carries one material
/// per submesh. Originals are deactivated; temporaries are destroyed.
/// NOTE(review): the 'objects' parameter is never read — the method iterates
/// Selection.objects instead; confirm whether callers expect the parameter to be honored.
/// </summary>
/// <param name="objects">Unused — selection is taken from Selection.objects.</param>
/// <returns>The new combined GameObject, or null on error / empty selection.</returns>
public static GameObject CombineMeshes(List <GameObject> objects)
{
    List <MeshFilter> meshes = new List <MeshFilter>();
    List <GameObject> originalObjects = new List <GameObject>();
    // Loop over all selected GameObjects
    foreach (var o in Selection.objects)
    {
        var go = o as GameObject;
        if (go == null)
        {
            Debug.LogError("Selected object is not a GameObject!");
            return(null);
        }
        // Collect the MeshFilters in each GameObject
        var filters = go.GetComponentsInChildren <MeshFilter>();
        if (filters.Length > 0)
        {
            meshes.AddRange(filters);
        }
        else
        {
            Debug.LogError("Selected object " + go.name + " does not have a MeshFilter!");
            return(null);
        }
        originalObjects.Add(go);
    }
    // Only run if we found MeshFilters from selection
    if (meshes.Count > 0)
    {
        // Data structure to store source information as we go
        List <string> names = new List <string>();
        List <GameObject> toDestroy = new List <GameObject>();
        Dictionary <Material, MaterialAndCombines> combinesByMaterial = new Dictionary <Material, MaterialAndCombines>();
        // Loop over all found MeshFilters
        for (int i = 0; i < meshes.Count; i++)
        {
            var filter = meshes[i];
            // If there is only one submesh, no need to break apart first
            if (filter.sharedMesh.subMeshCount == 1)
            {
                DoCombine(filter, combinesByMaterial);
            }
            else
            {
                // We need to isolate all submeshes into discreet Mesh objects.
                // Use the BreakMeshIntoBits function to create temporary GameObjects with one mesh each
                var newObjects = BreakMesh(filter.gameObject);
                foreach (var o in newObjects)
                {
                    DoCombine(o, combinesByMaterial);
                    // We need to clean these temp object up later
                    toDestroy.Add(o.gameObject);
                }
            }
            // Add the parent GO name, so we can add to the new GO's name later
            names.Add(filter.gameObject.name);
        }
        // For each bucket, combine meshes of the same material into a single mesh
        Dictionary <Material, Mesh> submeshes = new Dictionary <Material, Mesh>();
        foreach (var pair in combinesByMaterial)
        {
            var mesh = new Mesh();
            var list = pair.Value.list;
            mesh.CombineMeshes(list.ToArray(), true, false, false); // 2nd arg means to actually "weld" meshes
            submeshes[pair.Value.mat] = mesh;
            // The per-bucket source meshes are no longer needed once welded.
            foreach (var combine in list)
            {
                GameObject.DestroyImmediate(combine.mesh);
            }
        }
        // Create the uber mesh that will hold our merged submeshes
        Mesh newParentMesh = new Mesh();
        List <Material> matList = new List <Material>();
        List <CombineInstance> combineForParent = new List <CombineInstance>();
        // For each of our newly combined submeshes, we need to create a CombineInstance to be added to the final parent
        foreach (var pair in submeshes)
        {
            CombineInstance c = new CombineInstance();
            c.mesh = pair.Value;
            combineForParent.Add(c);
            matList.Add(pair.Key);
        }
        // Final combine into uber mesh (2nd arg false: keep one submesh per material)
        newParentMesh.CombineMeshes(combineForParent.ToArray(), false, false, false);
        // Create a new GO that with a MeshFilter that will reference our new uber mesh.
        // Name lists at most the first 4 sources, with "..." when truncated.
        var nameParts = names.GetRange(0, Mathf.Min(4, names.Count)).ToArray();
        var newObject = new GameObject("Combined: " + string.Join(",", nameParts) + (nameParts.Length < names.Count ? "..." : ""));
        var mf = newObject.AddComponent <MeshFilter>();
        newParentMesh.name = newObject.name;
        mf.mesh = newParentMesh;
        // Create the uber mesh's MeshRenderer, and add the material for each new submesh
        var mr = newObject.AddComponent <MeshRenderer>();
        mr.materials = matList.ToArray();
        mf.sharedMesh.RecalculateBounds();
        // Make sure the new mesh is at the same location as the old mesh
        var newCenter = mr.bounds.center;
        var newVertices = mf.sharedMesh.vertices;
        for (int i = 0; i < newVertices.Length; i++)
        {
            newVertices[i] -= newCenter; // Ofset the verts in the mesh to the original location
        }
        mf.sharedMesh.vertices = newVertices;
        mf.sharedMesh.RecalculateBounds();
        newObject.transform.position = newCenter;
        // If there was only one parent GameObject that we combined, parent the new GO to the same place in the hierarchy
        if (Selection.objects.Length == 1)
        {
            GameObject singleObject = Selection.objects[0] as GameObject;
            newObject.transform.parent = singleObject.transform.parent;
        }
        else
        {
            // We don't know how to choose where to parent something that came from lots of meshes, so leave the new GO at the root
        }
        // Deactivate the old mesh objects
        foreach (var o in originalObjects)
        {
            o.SetActive(false);
        }
        // Destroy the temp GOs we created when we broke complex meshes into bits
        foreach (var o in toDestroy)
        {
            GameObject.DestroyImmediate(o);
        }
        return(newObject);
    }
    return(null);
}
/// <summary>
/// Exports all documents of the Lotus Notes database selected in the tree view.
/// Runs on a ProgressDialog background worker in three phases: parse documents
/// into in-memory tables keyed by form, split multi-value columns into child
/// tables, then either INSERT the data into a MySQL server or write a .sql file.
/// </summary>
private void bExportDocuments_Click(object sender, EventArgs ea)
{
    // Only leaf nodes of the tree represent databases.
    if (treeView1.SelectedNode == null || treeView1.SelectedNode.Nodes.Count > 0)
    {
        MessageBox.Show("Select a database.");
        return;
    }
    // Shared between the DoWork and ProgressChanged delegates for ETA reporting.
    int total = 0;
    long startTicks = 0;
    long lastTicks = 0;
    string timeLeft = "";
    string timeElapsed = "0:00:00";
    string databasePath = treeView1.SelectedNode.Name;
    ProgressDialog pDialog = new ProgressDialog();
    pDialog.Title = "Exporting Documents";
    #region Export Documents
    pDialog.DoWork += delegate(object dialog, DoWorkEventArgs e)
    {
        try
        {
            //export documents
            NotesSession nSession = initSession(notesPassword);
            Dictionary <string, Table> tables = new Dictionary <string, Table>();
            NotesDatabase db;
            if (onLocalComputer)
            {
                db = nSession.GetDatabase("", databasePath, false);
            }
            else
            {
                db = nSession.GetDatabase(notesServer + "//" + notesDomain, databasePath, false);
            }
            //get all documents
            total = db.AllDocuments.Count;
            NotesDocumentCollection allDocuments = db.AllDocuments;
            NotesDocument doc = allDocuments.GetFirstDocument();
            startTicks = DateTime.Now.Ticks;
            // Phase 1: walk every document and bucket its fields into a Table per form.
            for (int i = 0; i < total; i++)
            {
                //check if cancelled
                if (pDialog.IsCancelled) { e.Cancel = true; return; }
                if (doc.HasItem("Form") && (string)doc.GetItemValue("Form")[0] != "")
                {
                    //get form
                    string form = (string)doc.GetItemValue("Form")[0];
                    if (!tables.ContainsKey(form))
                    {
                        tables.Add(form, new Table(form));
                    }
                    int row = tables[form].AddRow();
                    //get fields
                    //set multiple values
                    foreach (NotesItem item in doc.Items)
                    {
                        //check if cancelled
                        if (pDialog.IsCancelled) { e.Cancel = true; return; }
                        string field = item.Name;
                        //exclude fields that start with $ and the Form field and Readers field
                        if (excludeField.IsMatch(field))
                        {
                            continue;
                        }
                        // Map the Notes item type to a SQL column type.
                        string type = "";
                        switch (item.type)
                        {//TODO: get more types
                        case IT_TYPE.NUMBERS: type = "decimal(20,10)"; break;

                        case IT_TYPE.DATETIMES: type = "datetime"; break;

                        default: type = "text"; break;
                        }
                        object values = item.Values;
                        bool multiple = item.Values.Length > 1;
                        if (!tables[form].Columns.ContainsKey(field))
                        {
                            tables[form].Columns.Add(field, new Column(field, type));
                        }
                        if (multiple && !tables[form].Columns[field].MultipleValues)
                        {
                            tables[form].Columns[field].MultipleValues = multiple;
                        }
                        if (!tables[form].Columns[field].Values.ContainsKey(row))
                        {
                            tables[form].Columns[field].Values.Add(row, values);
                        }
                        else
                        {
                            // Duplicate field name within one document: suffix with a counter
                            // (field1, field2, ...) until a free column/row slot is found.
                            int j = 1;
                            while (tables[form].Columns.ContainsKey(field + j) && tables[form].Columns[field + j].Values.ContainsKey(row))
                            {
                                j++;
                            }
                            field += j;
                            if (!tables[form].Columns.ContainsKey(field))
                            {
                                tables[form].Columns.Add(field, new Column(field, type));
                            }
                            if (multiple && !tables[form].Columns[field].MultipleValues)
                            {
                                tables[form].Columns[field].MultipleValues = multiple;
                            }
                            tables[form].Columns[field].Values.Add(row, values);
                        }
                    }
                }
                //update progress
                pDialog.ReportProgress(i, "Parsing Documents");
                doc = allDocuments.GetNextDocument(doc);
            }
            //add tables for columns with multiple values
            // Phase 2: multi-value columns become child tables (parent_column) holding
            // one row per value plus a parentid column; single values are unwrapped
            // from their object[] container.
            Dictionary <string, Table> newTables = new Dictionary <string, Table>(tables.Count);
            lastTicks = 0;
            startTicks = DateTime.Now.Ticks;
            total = tables.Count;
            int count = 0;
            foreach (Table table in tables.Values)
            {
                //check if cancelled
                if (pDialog.IsCancelled) { e.Cancel = true; return; }
                pDialog.ReportProgress(++count, "Formatting Tables");
                Dictionary <string, Column> columns = new Dictionary <string, Column>(table.Columns);
                foreach (Column column in columns.Values)
                {
                    if (column.MultipleValues)
                    {
                        string tableName = table.Name + "_" + column.Name;
                        Table newTable = new Table(tableName, table.Name);
                        Column values = new Column(column.Name, column.Type);
                        Column ids = new Column(table.Name + "id", "int");
                        foreach (KeyValuePair <int, object> cell in column.Values)
                        {
                            //check if cancelled
                            if (pDialog.IsCancelled) { e.Cancel = true; return; }
                            int id = cell.Key;
                            object[] valueArray;
                            if (cell.Value.GetType().IsArray)
                            {
                                valueArray = (object[])cell.Value;
                            }
                            else
                            {
                                valueArray = new object[] { cell.Value };
                            }
                            foreach (object value in valueArray)
                            {
                                //check if cancelled
                                if (pDialog.IsCancelled) { e.Cancel = true; return; }
                                int row = newTable.AddRow();
                                ids.Values.Add(row, id);
                                values.Values.Add(row, value);
                            }
                        }
                        newTable.Columns.Add(table.Name + "id", ids);
                        newTable.Columns.Add(column.Name, values);
                        newTables.Add(tableName, newTable);
                        table.Columns.Remove(column.Name);
                    }
                    else
                    {
                        Dictionary <int, object> values = new Dictionary <int, object>(column.Values);
                        foreach (KeyValuePair <int, object> cell in values)
                        {
                            //check if cancelled
                            if (pDialog.IsCancelled) { e.Cancel = true; return; }
                            int id = cell.Key;
                            object value;
                            if (cell.Value.GetType().IsArray)
                            {
                                if (((object[])cell.Value).Length > 0)
                                {
                                    value = ((object[])cell.Value)[0];
                                }
                                else
                                {
                                    value = null;
                                }
                            }
                            else
                            {
                                value = cell.Value;
                            }
                            column.Values[id] = value;
                        }
                    }
                }
                newTables.Add(table.Name, table);
            }
            //format to sql
            // Phase 3: loop until the user completes an export or cancels.
            total = newTables.Count;
            bool complete = false;
            do
            {
                lastTicks = 0;
                count = 0;
                DialogResult result = DialogResult.Cancel;
                Invoke((MethodInvoker) delegate() { result = MessageBox.Show(pDialog.Window, "Do you want to export to a MySQL server?", "Export to a server?", MessageBoxButtons.YesNoCancel, MessageBoxIcon.Question, MessageBoxDefaultButton.Button1); });
                if (result == DialogResult.Yes)
                {
                    // Direct export to a MySQL server.
                    InputBox input = null;
                    Invoke((MethodInvoker) delegate() { input = InputBox.Show(pDialog.Window, "SQL server info?", new InputBoxItem[] { new InputBoxItem("Server", mysqlServer), new InputBoxItem("Database", mysqlDatabase), new InputBoxItem("Username", mysqlUsername), new InputBoxItem("Password", mysqlPassword, true), new InputBoxItem("Number of rows per INSERT", mysqlNumRowsPerInsert.ToString()) }, InputBoxButtons.OKCancel); });
                    if (input.Result == InputBoxResult.OK)
                    {
                        mysqlServer = input.Items["Server"];
                        mysqlDatabase = input.Items["Database"];
                        mysqlUsername = input.Items["Username"];
                        mysqlPassword = input.Items["Password"];
                        int.TryParse(input.Items["Number of rows per INSERT"], out mysqlNumRowsPerInsert);
                        // NOTE(review): the credential parts of this connection string appear
                        // redacted ("******") in this copy of the source — restore from VCS.
                        MySqlConnection conn = new MySqlConnection("SERVER=" + mysqlServer + ";USERNAME="******";PASSWORD="******";");
                        try
                        {
                            startTicks = DateTime.Now.Ticks;
                            conn.Open();
                            MySqlCommand command = conn.CreateCommand();
                            command.CommandText = createDatabase(mysqlDatabase);
                            command.ExecuteNonQuery();
                            foreach (Table table in newTables.Values)
                            {
                                //check if cancelled
                                if (pDialog.IsCancelled) { e.Cancel = true; return; }
                                pDialog.ReportProgress(++count, "Inserting SQL");
                                if (table.Columns.Count > 0)
                                {
                                    command.CommandText = createTable(table);
                                    command.ExecuteNonQuery();
                                    // Batch the INSERTs mysqlNumRowsPerInsert rows at a time.
                                    List <string> rows = insertTableRows(table);
                                    for (int i = 0; i < rows.Count; i += mysqlNumRowsPerInsert)
                                    {
                                        command.CommandText = beginInsertTable(table);
                                        command.CommandText += String.Join(",", rows.GetRange(i, Math.Min(rows.Count - i, mysqlNumRowsPerInsert))) + ";\n";
                                        command.CommandText += endInsertTable(table);
                                        command.ExecuteNonQuery();
                                        pDialog.ReportProgress(count, "Inserting SQL");
                                    }
                                }
                            }
                            command.CommandText = restoreVariables();
                            command.ExecuteNonQuery();
                            complete = true;
                        }
                        catch (Exception ex)
                        {
                            MessageBox.Show(ex.Message);
                        }
                        finally
                        {
                            conn.Close();
                        }
                    }
                }
                else if (result == DialogResult.No)
                {
                    // Export to a .sql file instead.
                    saveFileDialog1.FileName = "export.sql";
                    result = DialogResult.Cancel;
                    Invoke((MethodInvoker) delegate() { result = saveFileDialog1.ShowDialog(pDialog.Window); });
                    if (result == DialogResult.OK)
                    {
                        InputBox input = null;
                        Invoke((MethodInvoker) delegate() { input = InputBox.Show(pDialog.Window, "Database name?", "Database Name", mysqlDatabase, InputBoxButtons.OKCancel); });
                        if (input.Result == InputBoxResult.OK)
                        {
                            mysqlDatabase = input.Items["Database Name"];
                            StreamWriter file = new StreamWriter(saveFileDialog1.FileName, false);
                            try
                            {
                                startTicks = DateTime.Now.Ticks;
                                file.WriteLine(createDatabase(mysqlDatabase));
                                foreach (Table table in newTables.Values)
                                {
                                    //check if cancelled
                                    if (pDialog.IsCancelled) { e.Cancel = true; return; }
                                    pDialog.ReportProgress(++count, "Formatting SQL");
                                    if (table.Columns.Count > 0)
                                    {
                                        file.WriteLine(createTable(table));
                                        file.WriteLine(beginInsertTable(table));
                                        file.WriteLine(String.Join(",", insertTableRows(table)) + ";");
                                        file.WriteLine(endInsertTable(table));
                                    }
                                }
                                file.WriteLine(restoreVariables());
                                complete = true;
                            }
                            catch (Exception ex)
                            {
                                MessageBox.Show(ex.ToString());
                            }
                            finally
                            {
                                file.Close();
                            }
                        }
                    }
                }
                else
                {
                    // User cancelled the export-target prompt.
                    e.Cancel = true;
                    return;
                }
            } while (!complete);
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.ToString());
            e.Cancel = true;
        }
    };
    #endregion
    // Updates the dialog message with progress, ETA and elapsed time (at most once per second).
    pDialog.ProgressChanged += delegate(object dialog, ProgressChangedEventArgs e)
    {
        if (lastTicks == 0)
        {
            lastTicks = DateTime.Now.Ticks;
            timeLeft = "Calculating...";
        }
        else if (e.ProgressPercentage > 0 && DateTime.Now.Ticks > lastTicks + 10000000)
        {
            lastTicks = DateTime.Now.Ticks;
            long ticksPassed = lastTicks - startTicks;
            long thingsCompleted = e.ProgressPercentage;
            long thingsLeft = total - thingsCompleted;
            long ticks = thingsLeft * ticksPassed / thingsCompleted;
            timeLeft = ticksToString(ticks);
            timeElapsed = ticksToString(ticksPassed);
        }
        pDialog.Message = e.UserState.ToString() + ": " + e.ProgressPercentage + "/" + total + " Time Remaining: " + timeLeft + " Time Elapsed: " + timeElapsed;
        if (total == 0)
        {
            pDialog.Progress = 0;
        }
        else
        {
            pDialog.Progress = (100 * e.ProgressPercentage / total) % 101;
        }
    };
    pDialog.Completed += delegate(object dialog, RunWorkerCompletedEventArgs e)
    {
        if (!e.Cancelled)
        {
            MessageBox.Show("Completed Successfully");
        }
    };
    pDialog.Run();
}
/// <summary>
/// If there are elements in the construction queue, raises InQueue with a
/// snapshot of the queue, advances the position and ID counters by the number
/// of queued elements, and clears the queue.
/// </summary>
public void TryBuilding()
{
    // FIX: take the lock before testing Count. The original checked Count
    // outside the lock, so another thread could drain or grow the list between
    // the check and the snapshot (check-then-act race).
    lock (constructionList)
    {
        if (constructionList.Count == 0)
        {
            return;
        }
        int queued = constructionList.Count;
        // GetRange(0, Count) hands the subscribers a copy, so clearing below is safe.
        InQueue(this, new MyEventArgs(position, currentID, positionIncrementation, constructionList.GetRange(0, queued)));
        this.position += queued * positionIncrementation;
        this.currentID += queued;
        constructionList.Clear();
    }
}
/// <summary>
/// RLE-style (type 01) compressor. Runs of a repeated byte are emitted as
/// (counter + 0x80, byte) pairs and literal stretches as (length, bytes...);
/// counters are 7-bit, so buffers are flushed at 0x7F. Empty input encodes as
/// the single byte 0.
/// NOTE(review): a run's counter holds (run length - 1) because the first
/// occurrence is pulled back out of the literal buffer — the matching
/// decompressor must use counter+1 semantics; confirm before changing that.
/// </summary>
/// <param name="input">Raw bytes to compress.</param>
/// <returns>The compressed byte stream.</returns>
public static List <Byte> compressType01(List <Byte> input)
{
    List <Byte> output = new List <Byte>();
    List <Byte> bufferRep = new List <Byte>();   // pending run of the repeated byte
    List <Byte> bufferNRe = new List <Byte>();   // pending literal (non-repeated) bytes

    if (input.Count == 0)
    {
        output.Add(0);
        return(output);
    }

    int candidate = -1; // previous byte; -1 means "no previous byte"
    foreach (Byte item in input)
    {
        /* Choose list to insert in */
        if (item == candidate)
        {
            /* Repeated byte: pull its first occurrence back out of the literal buffer */
            if (bufferNRe.Count > 0 && bufferNRe.Last() == item)
            {
                bufferNRe.RemoveAt(bufferNRe.Count - 1);
            }
            bufferRep.Add(item);
            /* Once the run is established, flush the literals that precede it */
            if (bufferRep.Count > 2 && bufferNRe.Count > 0)
            {
                output.Add(BitConverter.GetBytes(bufferNRe.Count)[0]);
                output.AddRange(bufferNRe);
                bufferNRe.Clear();
            }
        }
        else
        {
            /* Non repeated byte: resolve any pending run first */
            if (bufferRep.Count < 3 && bufferRep.Count > 0)
            {
                /* Run too short to encode: keep it as literals */
                bufferNRe.AddRange(bufferRep);
                bufferRep.Clear();
            }
            else if (bufferRep.Count >= 3)
            {
                /* FIX: was "> 3" — a counter of exactly 3 matched neither branch,
                 * so stale run bytes lingered and corrupted later output.
                 * ">= 3" is the complement of the "< 3" branch above. */
                output.Add(BitConverter.GetBytes(bufferRep.Count + 0x80)[0]);
                output.Add(bufferRep[0]);
                bufferRep.Clear();
            }
            bufferNRe.Add(item);
        }
        candidate = item;

        /* 0x7F elements reached: counters are 7-bit, flush the full buffer */
        if (bufferRep.Count == 0x7F)
        {
            output.Add(BitConverter.GetBytes(bufferRep.Count + 0x80)[0]);
            output.Add(bufferRep[0]);
            bufferRep.Clear();
            candidate = -1;
        }
        else if (bufferNRe.Count >= 0x7F)
        {
            output.Add(BitConverter.GetBytes(Math.Min(0x7F, bufferNRe.Count))[0]);
            output.AddRange(bufferNRe.GetRange(0, 0x7F));
            /* FIX: was RemoveRange(0, 0x0F) — only 15 of the 127 emitted bytes
             * were removed, duplicating 112 literals on the next flush. */
            bufferNRe.RemoveRange(0, 0x7F);
        }
    }

    /* All items read. Flush buffer elements */
    if (bufferNRe.Count > 0)
    {
        /* Fold any short trailing run into the literal buffer */
        bufferNRe.AddRange(bufferRep);
        bufferRep.Clear();
        /* Write bufferNRe in output */
        output.Add(BitConverter.GetBytes(Math.Min(0x7F, bufferNRe.Count))[0]);
        output.AddRange(bufferNRe.GetRange(0, Math.Min(0x7F, bufferNRe.Count)));
        bufferNRe.RemoveRange(0, Math.Min(0x7F, bufferNRe.Count));
        /* Write bufferNRe again if some elements remains */
        if (bufferNRe.Count > 0)
        {
            output.Add(BitConverter.GetBytes(bufferNRe.Count)[0]);
            output.AddRange(bufferNRe);
            bufferNRe.Clear();
        }
    }
    else if (bufferRep.Count > 0)
    {
        /* Write buffer rep */
        output.Add(BitConverter.GetBytes(bufferRep.Count + 0x80)[0]);
        output.Add(bufferRep[0]);
        bufferRep.Clear();
    }

    /* Return compressed data */
    return(output);
}
/// <summary>
/// Removes every change made to the database since (and including) the creation
/// of the Client from an Invitation. If an account existed before the invitation,
/// it is restored from its archives, along with the documents and lines created
/// before the invitation was accepted.
/// </summary>
/// <param name="clientNouveau">Client that was created by answering an Invitation</param>
/// <returns>RetourDeService of a ClientEtatVue containing a Client identical to the one
/// the Invitation proposed to manage if there was one, null otherwise</returns>
public new async Task <RetourDeService <ClientEtatVue> > Supprime(Client clientNouveau)
{
    Client rétabli = null;
    ClientEtatVue vue = null;
    // All archives of this client, oldest first.
    List <ArchiveClient> archives = await _context.ArchiveClient
                                    .Where(a => a.Id == clientNouveau.Id)
                                    .OrderBy(a => a.Date)
                                    .ToListAsync();
    // Index of the archive that recorded the Nouveau (created-from-invitation) state.
    // NOTE(review): FindIndex returns -1 when no such archive exists, which would also
    // take the restore branch below — confirm that case cannot occur here.
    int indexCréation = archives.FindIndex(a => a.Etat == EtatRole.Nouveau);
    if (indexCréation != 0)
    {
        // The account existed before being attached to this client: it must be restored.
        rétabli = new Client
        {
            Id = clientNouveau.Id,
            SiteId = clientNouveau.SiteId
        };
        // Date of the last state change, fixed from the archives.
        DateTime dateEtat = archives.ElementAt(0).Date;
        // Applies the archive's non-null fields to the Client being restored and returns
        // a copy of the archive keyed to it; if the archive carries a state, the
        // state-change date is updated from it.
        ArchiveClient rétablitArchive(ArchiveClient archive)
        {
            Client.CopieDataSiPasNull(archive, rétabli);
            ArchiveClient archiveRétablie = new ArchiveClient
            {
                Id = clientNouveau.Id
            };
            Client.CopieData(archive, archiveRétablie);
            archiveRétablie.Date = archive.Date;
            if (archive.Etat != null)
            {
                rétabli.Etat = archive.Etat.Value;
                dateEtat = archive.Date;
            }
            return(archiveRétablie);
        }
        // Turns a RetourDeService carrying an error into a RetourDeService<ClientEtatVue>
        // carrying the same error.
        RetourDeService <ClientEtatVue> transformeErreur(RetourDeService retour)
        {
            // copy the error into a RetourDeService of ClientEtatVue
            RetourDeService <ClientEtatVue> retourVue = new RetourDeService <ClientEtatVue>(retour.Type);
            if (retour.IdentityError)
            {
                retourVue.Objet = retour.Objet;
            }
            return(retourVue);
        }
        // Re-attribute to the restored client the archives prior to the Nouveau state,
        // rebuilding its fields from them.
        List <ArchiveClient> archivesRétablies = archives
                                                 .GetRange(0, indexCréation)
                                                 .Select(a => rétablitArchive(a))
                                                 .ToList();
        // Only added to the Role table here.
        _context.Client.Add(rétabli);
        // Must save now so dependent entities can be added afterwards.
        RetourDeService retour = await SaveChangesAsync();
        if (!retour.Ok)
        {
            return(transformeErreur(retour));
        }
        vue = new ClientEtatVue
        {
            Id = rétabli.Id,
            Etat = EtatRole.Actif,
            DateEtat = dateEtat
        };
        Client.CopieData(rétabli, vue);
        // Add the restored archives.
        _context.ArchiveClient.AddRange(archivesRétablies);
        // Date of the transition to the Nouveau state.
        DateTime dateCréation = archives.ElementAt(indexCréation).Date;
        // Re-attribute to the restored client the documents and lines created before
        // the transition to the Nouveau state.
        List <DocCLF> anciensDocs = await _context.Docs
                                    .Where(d => d.Id == clientNouveau.Id && d.Date < dateCréation)
                                    .ToListAsync();
        // Nothing to re-attribute when there are no documents.
        if (anciensDocs.Count != 0)
        {
            List <LigneCLF> anciennesLignes = await _context.Lignes
                                              .Where(l => l.Id == clientNouveau.Id && anciensDocs.Where(d => d.No == l.No).Any())
                                              .ToListAsync();
            // Nothing to re-attribute when there are no lines.
            if (anciennesLignes.Count != 0)
            {
                vue.AvecDocuments = true;
                List <DocCLF> nouveauxDocs = anciensDocs
                                             .Select(d => DocCLF.Clone(rétabli.Id, d))
                                             .ToList();
                List <LigneCLF> nouvellesLignes = anciennesLignes
                                                  .Select(l => LigneCLF.Clone(rétabli.Id, l))
                                                  .ToList();
                _context.Docs.AddRange(nouveauxDocs);
                retour = await SaveChangesAsync();
                if (retour.Ok)
                {
                    _context.Lignes.AddRange(nouvellesLignes);
                    retour = await SaveChangesAsync();
                }
                if (!retour.Ok)
                {
                    return(transformeErreur(retour));
                }
            }
        }
    }
    // Finally delete the invitation-created client itself.
    _context.Client.Remove(clientNouveau);
    return(await SaveChangesAsync(vue));
}
/// <summary>
/// Recursively merge-sorts <paramref name="points"/> by their angle in radians
/// relative to the pivot <paramref name="p0"/>, returning a SortedList keyed by
/// that angle.
/// NOTE(review): SortedList already orders entries by key on Add, so the manual
/// merge below only controls insertion order, not the result; SortedList.Add
/// throws ArgumentException on duplicate keys (collinear points) — see the TODOs;
/// and ElementAt on SortedList is O(n), making the merge O(n²). Also assumes
/// points is non-empty — an empty list would recurse forever; confirm callers.
/// </summary>
private SortedList <double, Point> MergeSort(Point p0, List <Point> points)
{
    // Base case: a single point sorts to itself.
    if (points.Count == 1)
    {
        return(new SortedList <double, Point> {
            { GeometryUtils.AngleRad(p0, points[0]), points[0] }
        });
    }
    // Split in half and sort each side recursively.
    var middle = points.Count / 2;
    var leftPoints = points.GetRange(0, middle);
    var rightPoints = points.GetRange(middle, points.Count - middle);
    var leftSortedPoints = MergeSort(p0, leftPoints);
    var rightSortedPoints = MergeSort(p0, rightPoints);
    // Merge, walking both halves from their last element down.
    var leftptr = leftSortedPoints.Count - 1;
    var rightptr = rightSortedPoints.Count - 1;
    var sortedPoints = new SortedList <double, Point>();
    for (var i = leftSortedPoints.Count + rightSortedPoints.Count; i > 0; i--)
    {
        if (leftptr == -1)
        {
            // Left half exhausted: drain the right half.
            var el = rightSortedPoints.ElementAt(rightptr);
            // TODO: measure distance if same key exists
            sortedPoints.Add(el.Key, el.Value);
            rightptr--;
        }
        else if (rightptr == -1)
        {
            // Right half exhausted: drain the left half.
            var el = leftSortedPoints.ElementAt(leftptr);
            // TODO: measure distance if same key exists
            sortedPoints.Add(el.Key, el.Value);
            leftptr--;
        }
        else if (leftSortedPoints.ElementAt(leftptr).Key < rightSortedPoints.ElementAt(rightptr).Key)
        {
            var el = leftSortedPoints.ElementAt(leftptr);
            // TODO: measure distance if same key exists
            sortedPoints.Add(el.Key, el.Value);
            leftptr--;
        }
        else
        {
            var el = rightSortedPoints.ElementAt(rightptr);
            // TODO: measure distance if same key exists
            sortedPoints.Add(el.Key, el.Value);
            rightptr--;
        }
    }
    return(sortedPoints);
}
/// <summary>
/// Imports a TexTools ModPack (TTMP): opens the .mpd entry inside the zip,
/// reads mod data for the entries in packList in batches of 100, writes each
/// mod's bytes into the game's .dat files (overwriting in place when the new
/// mod fits, appending otherwise), updates both index files, and keeps the
/// .modlist JSON file in sync. Progress is reported through the BackgroundWorker.
/// </summary>
private void BackgroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
    var backgroundWorker = sender as BackgroundWorker;
    var packListCount = packList.Count;
    float i = 0;    // entries processed so far, for percentage reporting
    List <string> modFileLines = new List <string>(File.ReadAllLines(Properties.Settings.Default.Modlist_Directory));
    try
    {
        using (ZipArchive archive = ZipFile.OpenRead(packPath))
        {
            backgroundWorker.ReportProgress(0, "Opening TTMP Data File...\n");
            foreach (var entry in archive.Entries)
            {
                if (entry.FullName.EndsWith(".mpd"))
                {
                    // NOTE(review): 'stream' is only disposed at the end of this branch,
                    // not in a using block — an exception leaks it; confirm intended.
                    var stream = entry.Open();
                    var remainingPack = packListCount;
                    var currentPack = 0;
                    var prevPack = 0;
                    long newOffset = 0;
                    long offsetSum = 0;
                    List <ModPackItems> pack;
                    long cursor = 0;    // absolute read position within the .mpd stream
                    // Process the pack list in batches of up to 100 entries.
                    while (currentPack != packListCount)
                    {
                        prevPack = currentPack;
                        if (remainingPack > 100)
                        {
                            pack = packList.GetRange(currentPack, 100);
                            currentPack += 100;
                            remainingPack -= 100;
                        }
                        else
                        {
                            pack = packList.GetRange(currentPack, remainingPack);
                            currentPack += remainingPack;
                        }
                        backgroundWorker.ReportProgress((int)((i / packListCount) * 100), $"\nReading Entries ({prevPack} - {currentPack}/{packListCount})\n\n");
                        // Concatenate this batch's mod bytes; offsets must be non-decreasing
                        // because the zip entry stream is forward-only.
                        long totalSize = 0;
                        var modPackBytes = new List <byte>();
                        foreach (var p in pack)
                        {
                            if (p.mEntry.ModOffset < cursor)
                            {
                                backgroundWorker.ReportProgress((int)((i / packListCount) * 100), $"There was an warning in importing. \nImproper Mod Offset in ModPack for {p.mEntry.Name}. \nUnable to import {p.mEntry.Name}.");
                                continue;
                            }
                            totalSize += p.mEntry.ModSize;
                            var buf = new byte[p.mEntry.ModSize];
                            while (p.mEntry.ModOffset > cursor)
                            {
                                cursor++;
                                stream.ReadByte(); //seek forward for next offset
                            }
                            stream.Read(buf, 0, buf.Length);
                            cursor += buf.Length;
                            modPackBytes.AddRange(buf);
                        }
                        var uncompBytes = modPackBytes.ToArray();
                        offsetSum += newOffset;
                        newOffset = totalSize;
                        using (var ms = new MemoryStream(uncompBytes))
                        {
                            //backgroundWorker.ReportProgress((int)((i / packListCount) * 100), "Reading TTMP Data...\n");
                            var dataOffset = 0;    // read position inside this batch's buffer
                            using (var br = new BinaryReader(ms))
                            {
                                //backgroundWorker.ReportProgress((int)((i / packListCount) * 100), "Begining Import...\n");
                                foreach (var mpi in pack)
                                {
                                    currentImport = mpi.Name + "....";
                                    backgroundWorker.ReportProgress((int)((i / packListCount) * 100), currentImport);
                                    JsonEntry modEntry = null;
                                    bool inModList = false;
                                    bool overwrite = false;
                                    int lineNum = 0;
                                    int originalOffset = 0;
                                    int offset = 0;
                                    byte[] dataBytes = new byte[mpi.mEntry.ModSize];
                                    List <byte> modDataList = new List <byte>();
                                    br.BaseStream.Seek(dataOffset, SeekOrigin.Begin);
                                    modDataList.AddRange(br.ReadBytes(mpi.mEntry.ModSize));
                                    try
                                    {
                                        // Look for an existing modlist entry for this file.
                                        foreach (var line in modFileLines)
                                        {
                                            modEntry = JsonConvert.DeserializeObject <JsonEntry>(line);
                                            if (modEntry.fullPath.Equals(mpi.mEntry.FullPath))
                                            {
                                                inModList = true;
                                                break;
                                            }
                                            lineNum++;
                                        }
                                        // Pick the target .dat file, rolling to the next dat number
                                        // while the current one is at the ~2 GB size cap.
                                        var datNum = int.Parse(Info.ModDatDict[mpi.mEntry.DatFile]);
                                        var modDatPath = string.Format(Info.datDir, mpi.mEntry.DatFile, datNum);
                                        var fileLength = new FileInfo(modDatPath).Length;
                                        while (fileLength >= 2000000000)
                                        {
                                            datNum += 1;
                                            modDatPath = string.Format(Info.datDir, mpi.mEntry.DatFile, datNum);
                                            if (!File.Exists(modDatPath))
                                            {
                                                CreateDat.MakeNewDat(mpi.mEntry.DatFile);
                                            }
                                            fileLength = new FileInfo(modDatPath).Length;
                                        }
                                        //is in modlist and size of new mod is less than or equal to existing mod size
                                        if (inModList && mpi.mEntry.ModSize <= modEntry.modSize)
                                        {
                                            // Overwrite the existing slot in place and zero-pad the remainder.
                                            int sizeDiff = modEntry.modSize - modDataList.Count;
                                            datNum = ((modEntry.modOffset / 8) & 0x0F) / 2;
                                            modDatPath = string.Format(Info.datDir, modEntry.datFile, datNum);
                                            var datOffsetAmount = 16 * datNum;
                                            using (BinaryWriter bw = new BinaryWriter(File.OpenWrite(modDatPath)))
                                            {
                                                bw.BaseStream.Seek(modEntry.modOffset - datOffsetAmount, SeekOrigin.Begin);
                                                bw.Write(modDataList.ToArray());
                                                bw.Write(new byte[sizeDiff]);
                                            }
                                            Helper.UpdateIndex(modEntry.modOffset, mpi.mEntry.FullPath, mpi.mEntry.DatFile);
                                            Helper.UpdateIndex2(modEntry.modOffset, mpi.mEntry.FullPath, mpi.mEntry.DatFile);
                                            offset = modEntry.modOffset;
                                            overwrite = true;
                                        }
                                        if (!overwrite)
                                        {
                                            // Append to the end of the dat, padded to a 256-byte boundary.
                                            using (BinaryWriter bw = new BinaryWriter(File.OpenWrite(modDatPath)))
                                            {
                                                bw.BaseStream.Seek(0, SeekOrigin.End);
                                                while ((bw.BaseStream.Position & 0xFF) != 0)
                                                {
                                                    bw.Write((byte)0);
                                                }
                                                int eof = (int)bw.BaseStream.Position + modDataList.Count;
                                                while ((eof & 0xFF) != 0)
                                                {
                                                    modDataList.AddRange(new byte[16]);
                                                    eof = eof + 16;
                                                }
                                                var datOffsetAmount = 16 * datNum;
                                                offset = (int)bw.BaseStream.Position + datOffsetAmount;
                                                if (offset != 0)
                                                {
                                                    bw.Write(modDataList.ToArray());
                                                }
                                                else
                                                {
                                                    FlexibleMessageBox.Show("There was an issue obtaining the .dat4 offset to write data to, try importing again. " + "\n\n If the problem persists, please submit a bug report.", "ImportModel Error " + Info.appVersion, MessageBoxButtons.OK, MessageBoxIcon.Error);
                                                    return;
                                                }
                                            }
                                            int oldOffset = Helper.UpdateIndex(offset, mpi.mEntry.FullPath, mpi.mEntry.DatFile) * 8;
                                            Helper.UpdateIndex2(offset, mpi.mEntry.FullPath, mpi.mEntry.DatFile);
                                            //is in modlist and size of new mod is larger than existing mod size
                                            if (inModList && mpi.mEntry.ModSize > modEntry.modSize)
                                            {
                                                // Blank out the old modlist line (its slot stays reserved in the dat).
                                                oldOffset = modEntry.originalOffset;
                                                JsonEntry replaceEntry = new JsonEntry()
                                                {
                                                    category = String.Empty,
                                                    name = String.Empty,
                                                    fullPath = String.Empty,
                                                    originalOffset = 0,
                                                    modOffset = modEntry.modOffset,
                                                    modSize = modEntry.modSize,
                                                    datFile = mpi.mEntry.DatFile
                                                };
                                                modFileLines[lineNum] = JsonConvert.SerializeObject(replaceEntry);
                                                File.WriteAllLines(Properties.Settings.Default.Modlist_Directory, modFileLines);
                                            }
                                            JsonEntry jsonEntry = new JsonEntry()
                                            {
                                                category = mpi.Category,
                                                name = mpi.Name,
                                                fullPath = mpi.mEntry.FullPath,
                                                originalOffset = oldOffset,
                                                modOffset = offset,
                                                modSize = mpi.mEntry.ModSize,
                                                datFile = mpi.mEntry.DatFile
                                            };
                                            try
                                            {
                                                modFileLines.Add(JsonConvert.SerializeObject(jsonEntry));
                                                File.WriteAllLines(Properties.Settings.Default.Modlist_Directory, modFileLines);
                                            }
                                            catch (Exception ex)
                                            {
                                                FlexibleMessageBox.Show("Error Accessing .modlist File \n" + ex.Message, "ImportModel Error " + Info.appVersion, MessageBoxButtons.OK, MessageBoxIcon.Error);
                                            }
                                        }
                                    }
                                    catch (Exception ex)
                                    {
                                        FlexibleMessageBox.Show("There was an error in importing. \n" + ex.Message, "ImportModPack Error " + Info.appVersion, MessageBoxButtons.OK, MessageBoxIcon.Error);
                                        Debug.WriteLine(ex.StackTrace);
                                    }
                                    i++;
                                    backgroundWorker.ReportProgress((int)((i / packListCount) * 100), "Done.");
                                    dataOffset += mpi.mEntry.ModSize;
                                }
                            }
                        }
                    }
                    stream.Dispose();
                    stream.Close();
                }
            }
        }
    }
    catch (Exception ex)
    {
        FlexibleMessageBox.Show("Error opening TexTools ModPack file. \n" + ex.Message, "ImportModPack Error " + Info.appVersion, MessageBoxButtons.OK, MessageBoxIcon.Error);
        Debug.WriteLine(ex.StackTrace);
    }
}
/// <summary>
/// Recursively counts matching characters between two character ranges: anchors
/// on the longest common substring, then extends the count into the text on
/// either side of the anchor.
/// </summary>
private static int CountMatchingCharacters(List <char> leftRange, List <char> rightRange, bool lastLine)
{
    // Bail out as quickly as possible once the comparison has been cancelled.
    if (CompareCancelled)
    {
        return 0;
    }

    FindLongestMatch(leftRange, rightRange, out int leftStart, out int rightStart, out int matched);

    // On the last line any non-empty match counts; otherwise the anchor has to
    // reach the configured threshold before it is considered significant.
    int required = lastLine ? 1 : AppSettings.CharacterMatchThreshold;
    if (matched < required)
    {
        return 0;
    }

    // Add matches found in the text before the anchor on both sides.
    if (leftStart > 0 && rightStart > 0)
    {
        matched += CountMatchingCharacters(leftRange.GetRange(0, leftStart), rightRange.GetRange(0, rightStart), lastLine);
    }

    // Add matches found in the text after the anchor.
    // NOTE(review): 'matched' already includes the prefix total at this point, so
    // the suffix ranges start past the anchor by that amount too — preserved from
    // the original implementation; confirm the offset is intended.
    if (leftRange.Count > leftStart + matched && rightRange.Count > rightStart + matched)
    {
        matched += CountMatchingCharacters(
            leftRange.GetRange(leftStart + matched, leftRange.Count - (leftStart + matched)),
            rightRange.GetRange(rightStart + matched, rightRange.Count - (rightStart + matched)),
            lastLine);
    }

    return matched;
}
/// <summary>
/// Creates a shallow copy of a range of elements from the wrapped list.
/// Delegates directly to <see cref="List{T}.GetRange(int, int)"/>, so the same
/// validation applies: negative arguments throw ArgumentOutOfRangeException and
/// a range extending past the end throws ArgumentException.
/// </summary>
/// <param name="index">Zero-based index at which the range starts.</param>
/// <param name="count">Number of elements in the range.</param>
/// <returns>A new list containing the requested elements.</returns>
public List <T5> GetRange(int index, int count) { return(list.GetRange(index, count)); }
/// <summary>
/// Builds an HTML table body listing a character's skills, three
/// (name, level) column pairs per row, grouped by skill group with a
/// header row per group. Returns null when the ESI skill lookup fails.
/// </summary>
/// <param name="token">ESI access token used for the skills request.</param>
/// <param name="inspectCharId">Character whose skills are rendered.</param>
/// <param name="page">1-based page number; page size is TableSkillEntriesPerPage * 3.</param>
private async Task <string> GenerateSkillsHtml(string token, long inspectCharId, int page)
{
    var data = await APIHelper.ESIAPI.GetCharSkills(Reason, inspectCharId, token);
    if (data == null) { return(null); }
    await data.PopulateNames();
    // Group skills by DB group; each group becomes { ID, Value = skills in group }.
    var skillGroups = data.skills.GroupBy(a => a.DB_Group, (key, value) => new { ID = key, Value = value.ToList() });
    // Flatten into a single list: a synthetic "header" entry (only DB_Name set,
    // taken from the group name) followed by the group's skills sorted by name.
    var skills = new List <JsonClasses.SkillEntry>();
    foreach (var skillGroup in skillGroups)
    {
        skills.Add(new JsonClasses.SkillEntry { DB_Name = skillGroup.Value[0].DB_GroupName });
        skills.AddRange(skillGroup.Value.OrderBy(a => a.DB_Name));
    }
    // Page the flattened list: 3 entries per table row.
    // NOTE(review): if page is beyond the last page, startIndex exceeds
    // skills.Count and GetRange throws — presumably the caller clamps page; verify.
    var max = Settings.HRMModule.TableSkillEntriesPerPage * 3;
    var startIndex = (page - 1) * max;
    skills = skills.GetRange(startIndex, skills.Count > startIndex + max ? max : (skills.Count - startIndex));
    var sb = new StringBuilder();
    sb.AppendLine("<thead>");
    sb.AppendLine("<tr>");
    sb.AppendLine($"<th scope=\"col-md-auto\">#</th>");
    // Three repeated (name, level) header pairs — one per entry slot in a row.
    sb.AppendLine($"<th scope=\"col-md-auto\">{LM.Get("hrmSkillName")}</th>");
    sb.AppendLine($"<th scope=\"col-md-auto\">{LM.Get("hrmSkillLevel")}</th>");
    sb.AppendLine($"<th scope=\"col-md-auto\">{LM.Get("hrmSkillName")}</th>");
    sb.AppendLine($"<th scope=\"col-md-auto\">{LM.Get("hrmSkillLevel")}</th>");
    sb.AppendLine($"<th scope=\"col-md-auto\">{LM.Get("hrmSkillName")}</th>");
    sb.AppendLine($"<th scope=\"col-md-auto\">{LM.Get("hrmSkillLevel")}</th>");
    sb.AppendLine("</tr>");
    sb.AppendLine("</thead>");
    sb.AppendLine("<tbody>");
    var counter = startIndex + 1;          // running skill number shown in the first column
    var rowEntryCounter = 1;               // 1-based slot within the current row (1..maxRowEntries)
    var maxRowEntries = 3;
    for (var index = 0; index < skills.Count; index++)
    {
        var entry = skills[index];
        var bgcolor = GetSkillCellColor(entry.trained_skill_level, string.IsNullOrEmpty(entry.DB_GroupName));
        if (rowEntryCounter == 1) { sb.AppendLine("<tr>"); }
        // Group header entry: recognized by an empty DB_GroupName (only DB_Name was set above).
        if (string.IsNullOrEmpty(entry.DB_GroupName))
        {
            // Close out a partially filled row before emitting the group row.
            // NOTE(review): the filler loop below runs maxRowEntries - rowEntryCounter
            // times; confirm this pads exactly the remaining empty slots.
            if (rowEntryCounter > 1)
            {
                for (int i = rowEntryCounter; i < maxRowEntries; i++)
                {
                    sb.AppendLine(" <td></td>");
                    sb.AppendLine(" <td></td>");
                }
                sb.AppendLine("</tr>");
                sb.AppendLine("<tr>");
                rowEntryCounter = 1;
            }
            // Emit the group name on a row of its own.
            sb.AppendLine($" <th scope=\"row\"></th>");
            sb.AppendLine($" <td><b>{entry.DB_Name}</b></td>");
            sb.AppendLine($" <td></td>");
            sb.AppendLine($" <td></td>");
            sb.AppendLine($" <td></td>");
            sb.AppendLine($" <td></td>");
            sb.AppendLine($" <td></td>");
            sb.AppendLine("</tr>");
            continue;
        }
        // Regular skill entry: number cell only at the start of a row.
        if (rowEntryCounter == 1)
        {
            sb.AppendLine($" <th bgcolor=\"{bgcolor}\" scope=\"row\">{counter++}</th>");
        }
        sb.AppendLine($" <td bgcolor=\"{bgcolor}\">{entry.DB_Name}</td>");
        sb.AppendLine($" <td bgcolor=\"{bgcolor}\">{entry.trained_skill_level}</td>");
        rowEntryCounter++;
        if (rowEntryCounter > maxRowEntries) { sb.AppendLine("</tr>"); rowEntryCounter = 1; }
    }
    sb.AppendLine("</tbody>");
    return(sb.ToString());
}
/// <summary>
/// Run the code example. Queries the Change_History PQL table for the last
/// 24 hours of changes, pages through the results using the earliest change
/// ID as a pagination token (offset is unsupported on this table), and
/// writes all collected rows to a timestamped CSV file.
/// </summary>
/// <param name="user">The authenticated Ad Manager (DFP) user to query with.</param>
public void Run(DfpUser user)
{
    using (PublisherQueryLanguageService pqlService = (PublisherQueryLanguageService)user.GetService(DfpService.v201805.PublisherQueryLanguageService))
    {
        // Create statement to select recent changes. Change_History only supports ordering
        // by descending ChangeDateTime. Offset is not supported. To page, use the change ID
        // of the earliest change as a pagination token. A date time range is required when
        // querying this table.
        System.DateTime endDateTime = System.DateTime.Now;
        System.DateTime startDateTime = endDateTime.AddDays(-1);
        StatementBuilder statementBuilder = new StatementBuilder()
            .Select("Id, ChangeDateTime, EntityId, EntityType, Operation, UserId")
            .From("Change_History")
            .Where("ChangeDateTime < :endDateTime AND ChangeDateTime > :startDateTime")
            .OrderBy("ChangeDateTime DESC")
            .AddValue("startDateTime", DateTimeUtilities.FromDateTime(startDateTime, "America/New_York"))
            .AddValue("endDateTime", DateTimeUtilities.FromDateTime(endDateTime, "America/New_York"))
            .Limit(StatementBuilder.SUGGESTED_PAGE_LIMIT);
        int resultSetSize = 0;
        List <Row> allRows = new List <Row>();
        ResultSet resultSet;
        do
        {
            resultSet = pqlService.select(statementBuilder.ToStatement());
            if (resultSet.rows != null && resultSet.rows.Length > 0)
            {
                // Get the earliest change ID in the result set (rows are ordered
                // by ChangeDateTime DESC, so the last row is the earliest change).
                Row lastRow = resultSet.rows[resultSet.rows.Length - 1];
                string lastId = (string)PqlUtilities.GetValue(lastRow.values[0]);
                // Collect all changes from each page.
                allRows.AddRange(resultSet.rows);
                // Display results.
                Console.WriteLine(PqlUtilities.ResultSetToString(resultSet));
                // Use the earliest change ID in the result set to page.
                statementBuilder
                    .Where("Id < :id AND ChangeDateTime < :endDateTime AND " +
                           "ChangeDateTime > :startDateTime").AddValue("id", lastId);
            }
            resultSetSize = resultSet.rows == null ? 0 : resultSet.rows.Length;
            // A full page means there may be more results; a short page means we're done.
        } while (resultSetSize == StatementBuilder.SUGGESTED_PAGE_LIMIT);
        Console.WriteLine("Number of results found: " + allRows.Count);
        // Optionally, save all rows to a CSV.
        // Get a string array representation of the data rows.
        resultSet.rows = allRows.ToArray();
        List <String[]> rows = PqlUtilities.ResultSetToStringArrayList(resultSet);
        // Write the contents to a csv file (first row holds the column headers).
        CsvFile file = new CsvFile();
        file.Headers.AddRange(rows[0]);
        file.Records.AddRange(rows.GetRange(1, rows.Count - 1).ToArray());
        file.Write("recent_changes_" + this.GetTimeStamp() + ".csv");
    }
}
/// <summary>
/// Returns a page of comments for a place or a track as a partial view.
/// Pagination: <paramref name="list"/> is the 1-based page number and
/// <paramref name="count"/> the page size; once count*list comments have been
/// collected, the page slice [count*(list-1), count) is returned immediately.
/// Returns Json(0) when there are no comments or the type is unknown, and the
/// "Error" view on any exception.
/// </summary>
/// <param name="id">Place or track identifier to load comments for.</param>
/// <param name="typeComment">Either "Place" or "Track"; anything else yields Json(0).</param>
/// <param name="count">Page size (default 5).</param>
/// <param name="list">1-based page number (default 1).</param>
// NOTE(review): the Place and Track branches are near-identical copies over
// different entity types — a candidate for extraction into a shared mapper.
public ActionResult GetComments(long id, string typeComment, int count = 5, int list = 1)
{
    try
    {
        if (typeComment == "Place")
        {
            List <CommentPlace> comments = _commentPlaceService.GetItemList().Where(comment => comment.PlaceId == id).ToList();
            List <CommentModels> commentModels = new List <CommentModels>();
            foreach (var comment in comments)
            {
                // Map the entity onto the shared view model.
                CommentModels commentModel = new CommentModels();
                commentModel.Id = comment.Id;
                commentModel.TargetId = comment.PlaceId;
                commentModel.Assessment = comment.Assessment;
                commentModel.Text = comment.Text;
                commentModel.Rating = comment.Rating;
                commentModel.Picture = comment.Pictures.FirstOrDefault();
                commentModel.Author = comment.Author;
                commentModel.TypeComment = "Place";
                commentModels.Add(commentModel);
                // Enough comments collected to serve the requested page — slice and return.
                if (commentModels.Count >= count * list)
                {
                    var index = count * list - count;
                    commentModels = commentModels.GetRange(index, count);
                    return(PartialView(commentModels));
                }
            }
            // Fewer comments than a full requested page: return all of them
            // (or Json(0) when there were none at all).
            if (commentModels.Count == 0) { return(Json(0, JsonRequestBehavior.AllowGet)); }
            else { return(PartialView(commentModels)); }
        }
        if (typeComment == "Track")
        {
            List <CommentTrack> comments = _commentTrackService.GetItemList().Where(comment => comment.TrackId == id).ToList();
            List <CommentModels> commentModels = new List <CommentModels>();
            foreach (var comment in comments)
            {
                // Same mapping as the Place branch, but sourced from track comments.
                CommentModels commentModel = new CommentModels();
                commentModel.Id = comment.Id;
                commentModel.TargetId = comment.TrackId;
                commentModel.Assessment = comment.Assessment;
                commentModel.Text = comment.Text;
                commentModel.Rating = comment.Rating;
                commentModel.Picture = comment.Pictures.FirstOrDefault();
                commentModel.Author = comment.Author;
                commentModel.TypeComment = "Track";
                commentModels.Add(commentModel);
                if (commentModels.Count >= count * list)
                {
                    var index = count * list - count;
                    commentModels = commentModels.GetRange(index, count);
                    return(PartialView(commentModels));
                }
            }
            if (commentModels.Count == 0) { return(Json(0, JsonRequestBehavior.AllowGet)); }
            else { return(PartialView(commentModels)); }
        }
        // Unknown comment type.
        else { return(Json(0, JsonRequestBehavior.AllowGet)); }
    }
    catch (Exception e)
    {
        // Any failure (service, mapping, paging) surfaces as the generic error view.
        return(View("Error"));
    }
}
/// <summary>
/// Creates a contact in CRM for the given user and returns it as a CRMUser,
/// caching it on success. The configured unique-key property decides how the
/// userName is stored: as that property directly, or — when the key is
/// "fullname" — split into firstname/lastname parts.
/// </summary>
/// <param name="userName">User name; must be non-empty.</param>
/// <param name="email">Email; must be non-empty. Stored as emailaddress1 unless that is the unique key.</param>
/// <param name="providerUserKey">Optional contact id; Guid.Empty lets CRM assign one.</param>
/// <returns>The created CRMUser, or null when the CRM create call failed.</returns>
public override CRMUser CreateUser(string userName, string email, Guid providerUserKey)
{
    Assert.ArgumentNotNullOrEmpty(userName, "userName");
    Assert.ArgumentNotNullOrEmpty(email, "email");
    const string CreateUserKey = "createUser";
    ConditionalLog.Info(String.Format("CreateUser({0}, {1}, {2}). Started.", userName, email, providerUserKey), this, TimerAction.Start, CreateUserKey);
    var properties = new List <Property>();
    if (Configuration.Settings.UniqueKeyProperty != "fullname")
    {
        // The unique key is a single attribute: store the user name there as-is.
        properties.Add(new StringProperty { Name = Configuration.Settings.UniqueKeyProperty, Value = userName });
    }
    else
    {
        // "fullname" key: first token becomes firstname, the rest lastname.
        var nameParts = new List <string>(userName.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries));
        properties.Add(new StringProperty { Name = "firstname", Value = nameParts[0] });
        if (nameParts.Count > 1)
        {
            properties.Add(new StringProperty { Name = "lastname", Value = String.Join(" ", nameParts.GetRange(1, nameParts.Count - 1).ToArray()) });
        }
    }
    // Avoid writing the email twice when it already serves as the unique key.
    if ((email != null) && (Configuration.Settings.UniqueKeyProperty != "emailaddress1"))
    {
        properties.Add(new StringProperty { Name = "emailaddress1", Value = email });
    }
    // An explicit key pins the contact id instead of letting CRM generate one.
    if (providerUserKey != Guid.Empty)
    {
        properties.Add(new KeyProperty { Name = "contactid", Value = new Key { Value = providerUserKey } });
    }
    var contact = new DynamicEntity();
    contact.Name = EntityName.contact.ToString();
    contact.Properties = properties.ToArray();
    CRMUser user = null;
    try
    {
        providerUserKey = this.CrmService.Create(contact);
        ConditionalLog.Info(String.Format("CreateUser({0}, {1}, {2}). User has been created in CRM.", userName, email, providerUserKey), this, TimerAction.Tick, CreateUserKey);
        user = new CRMUser(
            userName,
            providerUserKey,
            email,
            null,
            String.Empty,
            true,
            false,
            DateTime.Now,
            DateTime.MinValue,
            DateTime.MinValue,
            DateTime.MinValue,
            DateTime.MinValue);
        this.CacheService.UserCache.Add(user);
    }
    catch (Exception e)
    {
        // Creation failure is logged and surfaced to the caller as a null result.
        ConditionalLog.Error(String.Format("Couldn't create user {0} in CRM.", userName), e, this);
    }
    ConditionalLog.Info(String.Format("CreateUser({0}, {1}, {2}). Finished.", userName, email, providerUserKey), this, TimerAction.Stop, CreateUserKey);
    return(user);
}
/// <summary>
/// Replaces the text between (startLine, startColumn) and (endLine, endColumn)
/// with <paramref name="text"/>, records the change for undo, and notifies
/// LinesUpdated listeners. Out-of-range coordinates are clamped; invalid line
/// indexes cause a silent no-op.
/// </summary>
/// <param name="startLine">First affected line index (0-based).</param>
/// <param name="startColumn">Column on the first line where the replacement starts; clamped to the line length.</param>
/// <param name="endLine">Last affected line index; clamped to the last line.</param>
/// <param name="endColumn">Column on the last line where the replacement ends.</param>
/// <param name="text">Replacement text (may span multiple lines or be empty).</param>
public void ReplaceText(int startLine, int startColumn, int endLine, int endColumn, string text)
{
    // Remember the original (pre-clamp) end position as the cursor for undo.
    System.Drawing.Point cursor = new Point(endColumn, endLine);
    if (endLine >= lineList.Count)
    {
        endLine = lineList.Count - 1;
        endColumn = lineList[lineList.Count - 1].Length;
    }
    if (startLine < 0 || (startLine >= lineList.Count)) { return; }
    if (endLine < 0 || (endLine >= lineList.Count)) { return; }
    // The simple rule of text buffer modification: if the end of selection
    // includes "\n" at the end of the final selected line, then it should
    // be moved to include the subsequent line. The reason for this is that
    // there is a possibility this "\n" will be removed as part of editing
    // (imagine the case where cursor is placed at the end of the line, and
    // user hits <Delete> key, then the "\n" is removed causing the line to
    // be merged with the next line), and we really want to include the
    // next line into undo recording for restoration later.
    // if (endLine != lineList.Count - 1) // If we're not already at the end.
    {
        // If at the end of selection is the "\n" character...
        if (endColumn != 0 && lineList[endLine][endColumn - 1] == '\n')
        {
            endLine = endLine + 1;
            endColumn = 0;
        }
    }
    int startLineCount = lineList.Count;
    // Build the merged result: prefix of the first line + replacement text
    // + suffix of the final line.
    string resultString = string.Empty;
    string firstLine = lineList[startLine];
    if (startColumn > firstLine.Length) { startColumn = firstLine.Length; }
    resultString = firstLine.Substring(0, startColumn) + text;
    string finalLine = lineList[endLine];
    if (endColumn < finalLine.Length)
    {
        resultString += finalLine.Substring(endColumn, finalLine.Length - endColumn);
    }
    // Capture the replaced lines for undo, then splice in the result.
    List <string> originalContents = new List <string>();
    if (endLine >= startLine)
    {
        originalContents.AddRange(lineList.GetRange(startLine, endLine - startLine + 1));
        lineList.RemoveRange(startLine, endLine - startLine + 1);
        if (string.IsNullOrEmpty(resultString) == false)
        {
            lineList.Insert(startLine, resultString);
        }
    }
    if (UndoGroupNeedsClosing(startLine, Operation.Replace))
    {
        undoRedoRecorder.CloseUndoGroup();
    }
    int resultLineCount = LineCountFromString(resultString);
    undoRedoRecorder.RecordInsertion(startLine, resultLineCount, originalContents, cursor);
    // A changed line count means the edit can't merge with the open undo group.
    if (resultLineCount != originalContents.Count)
    {
        undoRedoRecorder.CloseUndoGroup();
    }
    ArrangeTextBuffer();
    if (null != LinesUpdated)
    {
        // Notify listeners of line changes.
        int lineCount = endLine - startLine;
        LinesUpdated(this, new LinesUpdateEventArgs(startLine, lineCount, lineList.Count - startLineCount));
    }
}
/// <summary>
/// Finds the best-matching pair of lines between two unmatched line ranges
/// (by fraction of matching characters), marks the pair as a partial or full
/// match, highlights character-level differences, and recurses into the
/// sub-ranges before and after the matched pair.
/// </summary>
/// <param name="leftRange">Unmatched lines from the left document.</param>
/// <param name="rightRange">Unmatched lines from the right document.</param>
private static void MatchPartialLines(List <Line> leftRange, List <Line> rightRange)
{
    int matchingCharacters;
    float bestMatchFraction = 0;
    float matchFraction;
    int bestLeft = 0;
    int bestRight = 0;
    // When either side is a single line, relax the character-match threshold.
    bool lastLine = leftRange.Count == 1 || rightRange.Count == 1;
    // Exhaustive pairwise scan for the highest match fraction; a perfect
    // match (fraction == 1) short-circuits both loops.
    for (int leftIndex = 0; leftIndex < leftRange.Count; leftIndex++)
    {
        if (bestMatchFraction == 1) { break; }
        if (leftRange[leftIndex].IsWhitespaceLine) { continue; }
        for (int rightIndex = 0; rightIndex < rightRange.Count; rightIndex++)
        {
            if (rightRange[rightIndex].IsWhitespaceLine) { continue; }
            matchingCharacters = CountMatchingCharacters(leftRange[leftIndex].TrimmedCharacters, rightRange[rightIndex].TrimmedCharacters, lastLine);
            // Dice-style fraction: 2 * shared chars / total chars on both lines.
            matchFraction = (float)matchingCharacters * 2 / (leftRange[leftIndex].TrimmedCharacters.Count + rightRange[rightIndex].TrimmedCharacters.Count);
            if (matchFraction > bestMatchFraction)
            {
                bestMatchFraction = matchFraction;
                bestLeft = leftIndex;
                bestRight = rightIndex;
                if (bestMatchFraction == 1) { break; }
            }
        }
    }
    // Accept the pair if it clears the similarity threshold, or if the two
    // lines are textually identical after trimming (e.g. whitespace-only diff).
    if (bestMatchFraction > AppSettings.LineSimilarityThreshold || leftRange[bestLeft].TrimmedText == rightRange[bestRight].TrimmedText)
    {
        // Cross-link the matched pair.
        leftRange[bestLeft].MatchingLineIndex = rightRange[bestRight].LineIndex;
        rightRange[bestRight].MatchingLineIndex = leftRange[bestLeft].LineIndex;
        leftRange[bestLeft].Type = TextState.PartialMatch;
        rightRange[bestRight].Type = TextState.PartialMatch;
        if (leftRange[bestLeft].GetHashCode() == rightRange[bestRight].GetHashCode())
        {
            // Identical hashes: upgrade to a full match with no highlighting.
            leftRange[bestLeft].Type = TextState.FullMatch;
            rightRange[bestRight].Type = TextState.FullMatch;
        }
        else
        {
            // Rebuild the per-character highlight segments for the pair.
            leftRange[bestLeft].TextSegments.Clear();
            rightRange[bestRight].TextSegments.Clear();
            HighlightCharacterMatches(leftRange[bestLeft], rightRange[bestRight], leftRange[bestLeft].Characters, rightRange[bestRight].Characters);
        }
        // Recurse into the ranges before and after the matched pair.
        if (bestLeft > 0 && bestRight > 0)
        {
            MatchPartialLines(leftRange.GetRange(0, bestLeft), rightRange.GetRange(0, bestRight));
        }
        if (leftRange.Count > bestLeft + 1 && rightRange.Count > bestRight + 1)
        {
            MatchPartialLines(leftRange.GetRange(bestLeft + 1, leftRange.Count - (bestLeft + 1)), rightRange.GetRange(bestRight + 1, rightRange.Count - (bestRight + 1)));
        }
    }
}
/// <summary>
/// Implementation of the test itself: exercises a profile server's
/// neighborhood-update handling in 8 steps — initial bulk synchronization of
/// ~20,000 profiles, rejection of updates that would exceed the 20,000-profile
/// limit, and verification via profile searches after delete/change/add updates.
/// </summary>
/// <returns>true if the test passes, false otherwise.</returns>
public override async Task <bool> RunAsync()
{
    IPAddress ServerIp = (IPAddress)ArgumentValues["Server IP"];
    int PrimaryPort = (int)ArgumentValues["primary Port"];
    int BasePort = (int)ArgumentValues["Base Port"];
    int LocPort = (int)ArgumentValues["LOC Port"];
    log.Trace("(ServerIp:'{0}',PrimaryPort:{1},BasePort:{2},LocPort:{3})", ServerIp, PrimaryPort, BasePort, LocPort);
    bool res = false;
    Passed = false;
    ProtocolClient client = new ProtocolClient();
    ProfileServer profileServer = null;
    LocServer locServer = null;
    try
    {
        MessageBuilder mb = client.MessageBuilder;
        // Step 1
        log.Trace("Step 1");
        // Get port list.
        await client.ConnectAsync(ServerIp, PrimaryPort, false);
        Dictionary <ServerRoleType, uint> rolePorts = new Dictionary <ServerRoleType, uint>();
        bool listPortsOk = await client.ListServerPorts(rolePorts);
        client.CloseConnection();
        // Create identities. The last 60 of 20050 profiles get type "last";
        // of those, the first 10 are expected to fit within the server's
        // 20,000-profile capacity, the remaining 50 are "excess".
        int profileNumber = 0;
        byte[] imageData = File.ReadAllBytes(Path.Combine("images", TestName + ".png"));
        Dictionary <string, ProtocolClient> expectedLastClients = new Dictionary <string, ProtocolClient>(StringComparer.Ordinal);
        List <string> excessClientNames = new List <string>();
        for (int i = 0; i < 20050; i++)
        {
            ProtocolClient protocolClient = new ProtocolClient();
            protocolClient.InitializeRandomProfile(profileNumber, imageData);
            profileNumber++;
            if (i >= 19990)
            {
                protocolClient.Profile.Type = "last";
                if (i < 20000) { expectedLastClients.Add(protocolClient.Profile.Name, protocolClient); }
                else { excessClientNames.Add(protocolClient.Profile.Name); }
            }
            TestProfiles.Add(protocolClient.Profile.Name, protocolClient);
        }
        // Start simulated profile server.
        profileServer = new ProfileServer("TestProfileServer", ServerIp, BasePort, client.GetIdentityKeys(), new GpsLocation(1, 2));
        bool profileServerStartOk = profileServer.Start();
        // Start simulated LOC server.
        locServer = new LocServer("TestLocServer", ServerIp, LocPort);
        bool locServerStartOk = locServer.Start();
        await locServer.WaitForProfileServerConnectionAsync();
        bool step1Ok = profileServerStartOk && locServerStartOk;
        log.Trace("Step 1: {0}", step1Ok ? "PASSED" : "FAILED");
        // Step 2
        log.Trace("Step 2");
        // Announce new neighbor.
        Iop.Locnet.NeighbourhoodChange change = new Iop.Locnet.NeighbourhoodChange()
        {
            AddedNodeInfo = profileServer.GetNodeInfo(LocPort)
        };
        bool changeNotificationOk = await locServer.SendChangeNotification(change);
        // Wait for start of neighborhood initialization process.
        IncomingServerMessage incomingServerMessage = await profileServer.WaitForConversationRequest(ServerRole.ServerNeighbor, ConversationRequest.RequestTypeOneofCase.StartNeighborhoodInitialization);
        // Send update: the first 19990 profiles in randomly sized batches of 100-149.
        bool statusOk = false;
        bool updateOk = true;
        int profilesSent = 0;
        List <ProtocolClient> allProfiles = new List <ProtocolClient>(TestProfiles.Values);
        List <ProtocolClient> profilesToSend = allProfiles.GetRange(0, 19990);
        while (profilesToSend.Count > 0)
        {
            int batchSize = Math.Min(Rng.Next(100, 150), profilesToSend.Count);
            List <SharedProfileUpdateItem> updateItems = new List <SharedProfileUpdateItem>();
            foreach (ProtocolClient pc in profilesToSend.GetRange(0, batchSize))
            {
                updateItems.Add(pc.GetSharedProfileUpdateAddItem());
            }
            profilesToSend.RemoveRange(0, batchSize);
            Message updateRequest = await profileServer.SendNeighborhoodSharedProfileUpdateRequest(incomingServerMessage.Client, updateItems);
            profilesSent += batchSize;
            incomingServerMessage = await profileServer.WaitForResponse(ServerRole.ServerNeighbor, updateRequest);
            statusOk = incomingServerMessage.IncomingMessage.Response.Status == Status.Ok;
            bool batchOk = (updateRequest != null) && statusOk;
            if (!batchOk)
            {
                updateOk = false;
                break;
            }
        }
        // Finish neighborhood initialization process.
        Message finishRequest = await profileServer.SendFinishNeighborhoodInitializationRequest(incomingServerMessage.Client);
        incomingServerMessage = await profileServer.WaitForResponse(ServerRole.ServerNeighbor, finishRequest);
        statusOk = incomingServerMessage.IncomingMessage.Response.Status == Status.Ok;
        bool finishOk = (finishRequest != null) && statusOk;
        bool step2Ok = changeNotificationOk && updateOk && finishOk;
        log.Trace("Step 2: {0}", step2Ok ? "PASSED" : "FAILED");
        // Step 3
        log.Trace("Step 3");
        await client.ConnectAsync(ServerIp, (int)rolePorts[ServerRoleType.SrNeighbor], true);
        bool verifyIdentityOk = await client.VerifyIdentityAsync();
        // Send 60 more adds; the server holds 19990, so the 11th item (index 10)
        // is expected to overflow the 20,000 limit — hence details "10.add".
        List <SharedProfileUpdateItem> badUpdateItems = new List <SharedProfileUpdateItem>();
        foreach (ProtocolClient pc in allProfiles.GetRange(19990, 60))
        {
            badUpdateItems.Add(pc.GetSharedProfileUpdateAddItem());
        }
        Message requestMessage = mb.CreateNeighborhoodSharedProfileUpdateRequest(badUpdateItems);
        await client.SendMessageAsync(requestMessage);
        Message responseMessage = await client.ReceiveMessageAsync();
        bool idOk = responseMessage.Id == requestMessage.Id;
        statusOk = responseMessage.Response.Status == Status.ErrorInvalidValue;
        bool detailsOk = responseMessage.Response.Details == "10.add";
        bool badUpdateOk = idOk && statusOk && detailsOk;
        client.CloseConnection();
        // Step 3 Acceptance
        bool step3Ok = verifyIdentityOk && badUpdateOk;
        log.Trace("Step 3: {0}", step3Ok ? "PASSED" : "FAILED");
        // Step 4
        log.Trace("Step 4");
        // Start conversation.
        await client.ConnectAsync(ServerIp, (int)rolePorts[ServerRoleType.ClNonCustomer], true);
        bool startConversationOk = await client.StartConversationAsync();
        HashSet <byte[]> expectedCoveredServers = new HashSet <byte[]>(StructuralEqualityComparer <byte[]> .Default)
        {
            client.GetIdentityId(), Crypto.Sha256(client.ServerKey)
        };
        // Search all profiles with type "last".
        requestMessage = mb.CreateProfileSearchRequest("last", null, null, null, 0, 100, 100);
        await client.SendMessageAsync(requestMessage);
        responseMessage = await client.ReceiveMessageAsync();
        idOk = responseMessage.Id == requestMessage.Id;
        statusOk = responseMessage.Response.Status == Status.Ok;
        bool totalRecordCountOk = responseMessage.Response.ConversationResponse.ProfileSearch.TotalRecordCount == 10;
        bool maxResponseRecordCountOk = responseMessage.Response.ConversationResponse.ProfileSearch.MaxResponseRecordCount == 100;
        bool profilesCountOk = responseMessage.Response.ConversationResponse.ProfileSearch.Profiles.Count == 10;
        bool resultsOk = client.CheckProfileListMatchSearchResultItems(expectedLastClients, responseMessage.Response.ConversationResponse.ProfileSearch.Profiles.ToList(), false, false, client.GetIdentityId(), true);
        bool queryOk = idOk && statusOk && totalRecordCountOk && maxResponseRecordCountOk && profilesCountOk && resultsOk;
        client.CloseConnection();
        // Step 4 Acceptance
        bool step4Ok = startConversationOk && queryOk;
        log.Trace("Step 4: {0}", step4Ok ? "PASSED" : "FAILED");
        // Step 5
        log.Trace("Step 5");
        // Make TestProfiles reflect the status on the target profile server.
        foreach (string excessClientName in excessClientNames)
        {
            TestProfiles[excessClientName].Dispose();
            TestProfiles.Remove(excessClientName);
        }
        await client.ConnectAsync(ServerIp, (int)rolePorts[ServerRoleType.SrNeighbor], true);
        verifyIdentityOk = await client.VerifyIdentityAsync();
        // Select 140 profiles for deletion.
        List <SharedProfileUpdateItem> deleteUpdateItems = new List <SharedProfileUpdateItem>();
        while (deleteUpdateItems.Count < 140)
        {
            int index = Rng.Next(TestProfiles.Count);
            ProtocolClient pc = TestProfiles.ElementAt(index).Value;
            deleteUpdateItems.Add(pc.GetSharedProfileUpdateDeleteItem());
            if (expectedLastClients.ContainsKey(pc.Profile.Name))
            {
                expectedLastClients.Remove(pc.Profile.Name);
            }
            TestProfiles.Remove(pc.Profile.Name);
            pc.Dispose();
        }
        // Send delete update.
        requestMessage = mb.CreateNeighborhoodSharedProfileUpdateRequest(deleteUpdateItems);
        await client.SendMessageAsync(requestMessage);
        responseMessage = await client.ReceiveMessageAsync();
        idOk = responseMessage.Id == requestMessage.Id;
        statusOk = responseMessage.Response.Status == Status.Ok;
        bool deleteUpdateOk = idOk && statusOk;
        // Generate 160 new identities. After deleting 140 the server has room
        // for 140 more; item index 140 overflows — hence details "140.add".
        badUpdateItems.Clear();
        excessClientNames.Clear();
        for (int i = 0; i < 160; i++)
        {
            ProtocolClient protocolClient = new ProtocolClient();
            protocolClient.InitializeRandomProfile(profileNumber, imageData);
            profileNumber++;
            protocolClient.Profile.Type = "last";
            if (TestProfiles.Count < 20000) { expectedLastClients.Add(protocolClient.Profile.Name, protocolClient); }
            else { excessClientNames.Add(protocolClient.Profile.Name); }
            TestProfiles.Add(protocolClient.Profile.Name, protocolClient);
            badUpdateItems.Add(protocolClient.GetSharedProfileUpdateAddItem());
        }
        // Add the new profiles to the profile server.
        requestMessage = mb.CreateNeighborhoodSharedProfileUpdateRequest(badUpdateItems);
        await client.SendMessageAsync(requestMessage);
        responseMessage = await client.ReceiveMessageAsync();
        idOk = responseMessage.Id == requestMessage.Id;
        statusOk = responseMessage.Response.Status == Status.ErrorInvalidValue;
        detailsOk = responseMessage.Response.Details == "140.add";
        badUpdateOk = idOk && statusOk && detailsOk;
        client.CloseConnection();
        // Step 5 Acceptance
        bool step5Ok = verifyIdentityOk && deleteUpdateOk && badUpdateOk;
        log.Trace("Step 5: {0}", step5Ok ? "PASSED" : "FAILED");
        // Step 6
        log.Trace("Step 6");
        // Make TestProfiles reflect the status on the target profile server.
        foreach (string excessClientName in excessClientNames)
        {
            TestProfiles[excessClientName].Dispose();
            TestProfiles.Remove(excessClientName);
        }
        // Start conversation.
        await client.ConnectAsync(ServerIp, (int)rolePorts[ServerRoleType.ClNonCustomer], true);
        startConversationOk = await client.StartConversationAsync();
        // Search all profiles with type "last".
        requestMessage = mb.CreateProfileSearchRequest("last", null, null, null, 0, 1000, 1000, false, false);
        await client.SendMessageAsync(requestMessage);
        responseMessage = await client.ReceiveMessageAsync();
        idOk = responseMessage.Id == requestMessage.Id;
        statusOk = responseMessage.Response.Status == Status.Ok;
        totalRecordCountOk = responseMessage.Response.ConversationResponse.ProfileSearch.TotalRecordCount == expectedLastClients.Count;
        maxResponseRecordCountOk = responseMessage.Response.ConversationResponse.ProfileSearch.MaxResponseRecordCount == 1000;
        profilesCountOk = responseMessage.Response.ConversationResponse.ProfileSearch.Profiles.Count == expectedLastClients.Count;
        resultsOk = client.CheckProfileListMatchSearchResultItems(expectedLastClients, responseMessage.Response.ConversationResponse.ProfileSearch.Profiles.ToList(), false, false, client.GetIdentityId(), false);
        queryOk = idOk && statusOk && totalRecordCountOk && maxResponseRecordCountOk && profilesCountOk && resultsOk;
        client.CloseConnection();
        // Step 6 Acceptance
        bool step6Ok = startConversationOk && queryOk;
        log.Trace("Step 6: {0}", step6Ok ? "PASSED" : "FAILED");
        // Step 7
        log.Trace("Step 7");
        await client.ConnectAsync(ServerIp, (int)rolePorts[ServerRoleType.SrNeighbor], true);
        verifyIdentityOk = await client.VerifyIdentityAsync();
        // Select 40 profiles for deletion.
        deleteUpdateItems = new List <SharedProfileUpdateItem>();
        while (deleteUpdateItems.Count < 40)
        {
            int index = Rng.Next(TestProfiles.Count);
            ProtocolClient pc = TestProfiles.ElementAt(index).Value;
            deleteUpdateItems.Add(pc.GetSharedProfileUpdateDeleteItem());
            if (expectedLastClients.ContainsKey(pc.Profile.Name))
            {
                expectedLastClients.Remove(pc.Profile.Name);
            }
            TestProfiles.Remove(pc.Profile.Name);
            pc.Dispose();
        }
        // Select 40 profiles for change, but avoid updating one profile twice in a single update message, which is forbidden.
        HashSet <int> usedIndexes = new HashSet <int>();
        List <SharedProfileUpdateItem> changeUpdateItems = new List <SharedProfileUpdateItem>();
        while (changeUpdateItems.Count < 40)
        {
            int index = Rng.Next(TestProfiles.Count);
            if (usedIndexes.Contains(index)) { continue; }
            usedIndexes.Add(index);
            ProtocolClient pc = TestProfiles.ElementAt(index).Value;
            pc.Profile.ExtraData = "1234567890";
            SharedProfileUpdateItem changeUpdateItem = new SharedProfileUpdateItem()
            {
                Change = new SharedProfileChangeItem()
                {
                    IdentityNetworkId = ProtocolHelper.ByteArrayToByteString(pc.GetIdentityId()),
                    SetExtraData = true,
                    ExtraData = pc.Profile.ExtraData
                }
            };
            changeUpdateItems.Add(changeUpdateItem);
        }
        // Generate 40 new identities.
        List <SharedProfileUpdateItem> addUpdateItems = new List <SharedProfileUpdateItem>();
        for (int i = 0; i < 40; i++)
        {
            ProtocolClient protocolClient = new ProtocolClient();
            protocolClient.InitializeRandomProfile(profileNumber, imageData);
            profileNumber++;
            protocolClient.Profile.Type = "last";
            expectedLastClients.Add(protocolClient.Profile.Name, protocolClient);
            TestProfiles.Add(protocolClient.Profile.Name, protocolClient);
            addUpdateItems.Add(protocolClient.GetSharedProfileUpdateAddItem());
        }
        // Send all the updates as one.
        List <SharedProfileUpdateItem> newUpdateItems = new List <SharedProfileUpdateItem>();
        newUpdateItems.AddRange(deleteUpdateItems);
        newUpdateItems.AddRange(changeUpdateItems);
        newUpdateItems.AddRange(addUpdateItems);
        requestMessage = mb.CreateNeighborhoodSharedProfileUpdateRequest(newUpdateItems);
        await client.SendMessageAsync(requestMessage);
        responseMessage = await client.ReceiveMessageAsync();
        idOk = responseMessage.Id == requestMessage.Id;
        statusOk = responseMessage.Response.Status == Status.Ok;
        bool newUpdateOk = idOk && statusOk;
        client.CloseConnection();
        // Step 7 Acceptance
        bool step7Ok = verifyIdentityOk && newUpdateOk;
        log.Trace("Step 7: {0}", step7Ok ? "PASSED" : "FAILED");
        // Step 8
        log.Trace("Step 8");
        // Start conversation.
        await client.ConnectAsync(ServerIp, (int)rolePorts[ServerRoleType.ClNonCustomer], true);
        startConversationOk = await client.StartConversationAsync();
        // Search all profiles with type "last".
        requestMessage = mb.CreateProfileSearchRequest("last", null, null, null, 0, 1000, 1000, false, false);
        await client.SendMessageAsync(requestMessage);
        responseMessage = await client.ReceiveMessageAsync();
        idOk = responseMessage.Id == requestMessage.Id;
        statusOk = responseMessage.Response.Status == Status.Ok;
        totalRecordCountOk = responseMessage.Response.ConversationResponse.ProfileSearch.TotalRecordCount == expectedLastClients.Count;
        maxResponseRecordCountOk = responseMessage.Response.ConversationResponse.ProfileSearch.MaxResponseRecordCount == 1000;
        profilesCountOk = responseMessage.Response.ConversationResponse.ProfileSearch.Profiles.Count == expectedLastClients.Count;
        resultsOk = client.CheckProfileListMatchSearchResultItems(expectedLastClients, responseMessage.Response.ConversationResponse.ProfileSearch.Profiles.ToList(), false, false, client.GetIdentityId(), false);
        queryOk = idOk && statusOk && totalRecordCountOk && maxResponseRecordCountOk && profilesCountOk && resultsOk;
        client.CloseConnection();
        // Step 8 Acceptance
        bool step8Ok = startConversationOk && queryOk;
        log.Trace("Step 8: {0}", step8Ok ? "PASSED" : "FAILED");
        Passed = step1Ok && step2Ok && step3Ok && step4Ok && step5Ok && step6Ok && step7Ok && step8Ok;
        res = true;
    }
    catch (Exception e)
    {
        log.Error("Exception occurred: {0}", e.ToString());
    }
    // Cleanup: dispose all clients and shut down the simulated servers.
    client.Dispose();
    foreach (ProtocolClient protocolClient in TestProfiles.Values)
    {
        protocolClient.Dispose();
    }
    if (profileServer != null) { profileServer.Shutdown(); }
    if (locServer != null) { locServer.Shutdown(); }
    log.Trace("(-):{0}", res);
    return(res);
}
/// <summary>
/// Runs a parallel fuzzy word search over the loaded word list: validates the
/// UI inputs (search word, maximum edit distance 1..5, thread count), splits
/// the list into per-thread sub-ranges, runs the searches as Tasks, then shows
/// the merged results and elapsed time in the UI.
/// </summary>
private void button3_Click(object sender, EventArgs e)
{
    Console.WriteLine("asd");
    // Word to search for
    string word = this.textBox3.Text.Trim();
    // Proceed only if a search word was entered and a word list is loaded
    if (!string.IsNullOrWhiteSpace(word) && list.Count > 0)
    {
        // Maximum edit distance, read from the UI
        int maxDist;
        if (!int.TryParse(this.textBox5.Text.Trim(), out maxDist))
        {
            MessageBox.Show("Необходимо указать максимальное расстояние");
            return;
        }
        if (maxDist < 1 || maxDist > 5)
        {
            MessageBox.Show("Максимальное расстояние должно быть в диапазоне от 1 до 5");
            return;
        }
        // Requested number of worker threads, read from the UI
        int ThreadCount;
        if (!int.TryParse(this.textBox7.Text.Trim(), out ThreadCount))
        {
            MessageBox.Show("Необходимо указать количество потоков");
            return;
        }
        Stopwatch timer = new Stopwatch();
        timer.Start();
        //-------------------------------------------------
        // Start of the parallel search
        //-------------------------------------------------
        // Resulting list
        List <ParallelSearchResult> Result = new List <ParallelSearchResult>();
        // Split the list into fragments for parallel execution on threads
        List <MinMax> arrayDivList = SubArrays.DivideSubArrays(0, list.Count, ThreadCount);
        int count = arrayDivList.Count;
        // The number of tasks matches the number of array fragments
        Task <List <ParallelSearchResult> >[] tasks = new Task <List <ParallelSearchResult> > [count];
        // Launch the tasks
        for (int i = 0; i < count; i++)
        {
            List <string> tempTaskList = list.GetRange(arrayDivList[i].Min, arrayDivList[i].Max - arrayDivList[i].Min);
            tasks[i] = new Task <List <ParallelSearchResult> >(ArrayThreadTask, new ParallelSearchThreadParam()
            {
                tempList = tempTaskList, maxDist = maxDist, ThreadNum = i, wordPattern = word
            });
            // Start the task
            tasks[i].Start();
        }
        Task.WaitAll(tasks);
        timer.Stop();
        // Merge the per-task results
        for (int i = 0; i < count; i++)
        {
            Result.AddRange(tasks[i].Result);
        }
        //-------------------------------------------------
        // End of the parallel search
        //-------------------------------------------------
        // NOTE(review): the Stopwatch was already stopped above, so this second
        // Stop() is a no-op; the displayed time excludes the merge step.
        timer.Stop();
        // Display the results
        // Search duration
        this.textBox6.Text = timer.Elapsed.ToString();
        // Effective number of threads used
        this.textBox8.Text = count.ToString();
        // Begin updating the results list box
        this.listBox1.BeginUpdate();
        // Clear the list
        this.listBox1.Items.Clear();
        // Show the search results
        foreach (var x in Result)
        {
            string temp = x.word + "(расстояние=" + x.dist.ToString() + " поток=" + x.ThreadNum.ToString() + ")";
            this.listBox1.Items.Add(temp);
        }
        // Finish updating the results list box
        this.listBox1.EndUpdate();
    }
    else
    {
        MessageBox.Show("Необходимо выбрать файл и ввести слово для поиска");
    }
}
/// <summary>
/// Handles an APNS error response for the notification with the given
/// identifier: everything sent before it is reported as successful, the
/// failed notification itself is reported with a
/// <see cref="NotificationFailureException"/>, and everything sent after it
/// is reported as needing to be re-sent. The in-flight list is cleared.
/// </summary>
/// <param name="identifier">APNS identifier of the notification that failed.</param>
/// <param name="status">APNS status/error code reported for the failure.</param>
private void HandleFailedNotification(int identifier, byte status)
{
    // Get the index of our failed notification (by identifier)
    var failedIndex = _sentNotifications.FindIndex(n => n.Identifier == identifier);
    if (failedIndex < 0)
    {
        return;
    }

    Log.Info("Failed Notification on channel {1}: {0}", identifier, _channelInstanceId);

    // Get all the notifications before the failed one and mark them as sent!
    // (APNS guarantees everything before the failed identifier was accepted.)
    if (failedIndex > 0)
    {
        var successful = _sentNotifications.GetRange(0, failedIndex);
        successful.ForEach(n =>
        {
            Interlocked.Decrement(ref _trackedNotificationCount);
            if (n.Callback != null)
            {
                n.Callback(this, new SendNotificationResult(n.Notification));
            }
        });
        _sentNotifications.RemoveRange(0, failedIndex);
    }

    // Get the failed notification itself (now at the head of the list)
    var failedNotification = _sentNotifications[0];

    // Fail and remove the failed index from the list
    Interlocked.Decrement(ref _trackedNotificationCount);
    if (failedNotification.Callback != null)
    {
        failedNotification.Callback(this, new SendNotificationResult(failedNotification.Notification, false, new NotificationFailureException(status, failedNotification.Notification)));
    }
    _sentNotifications.RemoveAt(0);

    // Notifications sent after the failure must be re-sent
    _sentNotifications.Reverse();
    _sentNotifications.ForEach(n =>
    {
        Interlocked.Decrement(ref _trackedNotificationCount);
        // BUG FIX: previously this checked and invoked failedNotification.Callback,
        // so every subsequent notification was reported through the *failed*
        // notification's callback (and skipped entirely when that callback was
        // null). Each notification must report through its own callback.
        if (n.Callback != null)
        {
            n.Callback(this, new SendNotificationResult(n.Notification, true, new Exception("Sent after previously failed Notification.")) { CountsAsRequeue = false });
        }
    });
    _sentNotifications.Clear();
}