/// <summary>
/// Guerilla-specific pre-processing: removes the two trailing non-terminator
/// fields from the field list before further processing.
/// </summary>
/// <param name="binaryReader">Tag data reader (unused here; kept for signature parity with sibling pre-process methods).</param>
/// <param name="fields">Mutable field list, trimmed in place.</param>
protected static void GuerillaPreProcessMethod(BinaryReader binaryReader, IList<tag_field> fields)
{
    // Drop the last two fields that are not terminators, one at a time.
    for (var pass = 0; pass < 2; pass++)
    {
        var trailing = fields.Last(x => x.type != field_type._field_terminator);
        fields.Remove(trailing);
    }
}
/// <summary>
/// Builds the "last season" entries for triple exponential (Holt-Winters style)
/// smoothing. Unlike building the next season, the level (Ft) and trend (Tt)
/// are computed once from the final entry of the current season and then held
/// constant for every generated entry; only the seasonal index (St) varies.
/// </summary>
/// <param name="currSeason">Entries of the season just computed.</param>
/// <param name="dataEntries">Raw observations; the last value is the most recent actual.</param>
/// <param name="currPeriod">Index of the current period within the data series.</param>
/// <returns>One season's worth of smoothed entries with forecasts.</returns>
private IList<TripleExponentialEntry> BuildLastSeason(IList<TripleExponentialEntry> currSeason, IEnumerable<DataEntry> dataEntries, int currPeriod)
{
    //building the last season is similar to NextSeason, but uses the same Ft and Tt once calculated for the first entry.
    var currentFt = currSeason.Last().Ft;
    var currentTt = currSeason.Last().Tt;
    double currentSt = currSeason.Last().St, lastSt = currSeason.Last().St;
    var lastActual = dataEntries.Last().Value;
    IList<TripleExponentialEntry> newSeason = new List<TripleExponentialEntry>();
    // Level update: alpha-weighted deseasonalized actual vs. previous level+trend.
    currentFt = (_alpha*(lastActual/lastSt)) + ((1 - _alpha)*(currentTt + currentFt));
    // Trend update: beta-weighted level change vs. previous trend.
    currentTt = (_beta*(currentFt - currSeason.Last().Ft)) + ((1 - _beta)*currentTt);
    for (var currSeasonIndex = 1; currSeasonIndex <= _periodsPerSeason; currSeasonIndex++)
    {
        // Actual and smoothed values from the corresponding slot one season back.
        var lastPeriodActual = dataEntries.ElementAt((currPeriod + currSeasonIndex) - _periodsPerSeason - 2).Value;
        var lastPeriodFt = currSeason.ElementAt(currSeasonIndex - 1).Ft;
        var lastPeriodSt = currSeason.ElementAt(currSeasonIndex - 1).St;
        // Seasonal index update: gamma-weighted ratio of actual to level.
        currentSt = (_gamma*(lastPeriodActual/lastPeriodFt)) + ((1 - _gamma)*lastPeriodSt);
        newSeason.Add(new TripleExponentialEntry
        {
            Ft = currentFt,
            Tt = currentTt,
            St = currentSt,
            // Forecast projects the fixed level/trend forward by the slot index, scaled by seasonality.
            Forecast = (currentFt + (currentTt*currSeasonIndex))*currentSt
        });
    }
    return newSeason;
}
/// <summary>
/// Tokenizes the given source characters into <see cref="Tokens"/>.
/// Whitespace is skipped; parentheses, commas, double quotes and semicolons are
/// emitted as punctuation tokens, and accumulated text becomes
/// caller/variable/string tokens.
/// </summary>
/// <param name="code">Raw source characters to tokenize.</param>
public void Invoke(char[] code)
{
    Tokens = new List<CToken>();
    var currentToken = new StringBuilder();
    // NOTE(review): whitespace is skipped unconditionally, so spaces inside
    // double-quoted strings are also dropped — confirm this is intended.
    foreach (char c in code)
    {
        if (char.IsWhiteSpace(c))
        {
            continue;
        }
        switch (c)
        {
            case Punctuations.OPEN_PARENS:
                // Text accumulated before '(' is the thing being called.
                Tokens.Add(new CToken(currentToken.ToString(), NodeType.Caller));
                Tokens.Add(new CToken(Punctuations.OPEN_PARENS.ToString(), NodeType.Punctuation));
                currentToken = new StringBuilder();
                break;
            case Punctuations.CLOSE_PARENS:
                Tokens.Add(new CToken(currentToken.ToString(), NodeType.Variable));
                Tokens.Add(new CToken(Punctuations.CLOSE_PARENS.ToString(), NodeType.Punctuation));
                currentToken = new StringBuilder();
                break;
            case Punctuations.COMMA:
                if (currentToken.Length != 0)
                {
                    Tokens.Add(new CToken(currentToken.ToString(), NodeType.Variable));
                }
                Tokens.Add(new CToken(Punctuations.COMMA.ToString(), NodeType.Punctuation));
                currentToken = new StringBuilder();
                break;
            case Punctuations.DOUBLE_QUOTE:
                if (currentToken.Length == 0)
                {
                    // Opening quote of a string literal.
                    Tokens.Add(new CToken(Punctuations.DOUBLE_QUOTE.ToString(), NodeType.Punctuation));
                }
                else
                {
                    // Closing quote: flush the accumulated string literal first.
                    Tokens.Add(new CToken(currentToken.ToString(), NodeType.String));
                    Tokens.Add(new CToken(Punctuations.DOUBLE_QUOTE.ToString(), NodeType.Punctuation));
                    currentToken = new StringBuilder();
                }
                break;
            case Punctuations.SEMI_COL:
                Tokens.Add(new CToken(Punctuations.SEMI_COL.ToString(), NodeType.Punctuation));
                break;
        }
        // BUGFIX: guard the token list being empty — Tokens.Last() throws
        // InvalidOperationException on the very first characters of the input,
        // and Last() never returns null for a non-empty list, so the previous
        // `Tokens.Last() != null` check could not protect against that.
        if (char.IsLetterOrDigit(c) ||
            (Tokens.Count > 0 && Tokens.Last().Value == "\"" && c != '"'))
        {
            currentToken.Append(c);
        }
    }
}
/// <summary>
/// Creates an accessor for a collection-typed property chain, caching the
/// reflection members (Add, indexer, RemoveAt) required to manipulate it.
/// </summary>
/// <param name="propertyChain">Chain of properties leading to the collection.</param>
/// <param name="genericType">Element type of the collection; must not be null.</param>
/// <param name="collectionType">Concrete collection type whose Add method is used.</param>
public PropertyAccessor(IList<PropertyInfo> propertyChain, Type genericType, Type collectionType)
    : this(propertyChain)
{
    if (genericType == null)
        throw new ArgumentNullException("genericType");

    GenericType = genericType;
    CollectionType = collectionType;
    AddMethod = CollectionType.GetMethod("Add");

    // Reflect once over the type of the final property in the chain.
    var leafPropertyType = propertyChain.Last().PropertyType;
    ItemPropertyInfo = leafPropertyType.GetProperty("Item");
    RemoveAtMethod = leafPropertyType.GetMethod("RemoveAt");
    IsCollection = true;
}
/// <summary>
/// Adds the vertices of one or more OSM ways to the polygon. A single way must
/// already be closed; multiple ways are stitched end-to-end, reversing each
/// way's node order when needed so consecutive ways connect, and the result
/// must form a closed ring.
/// </summary>
/// <param name="polygon">Polygon being built (closing vertex is not duplicated).</param>
/// <param name="ways">Way(s) whose nodes form the polygon boundary.</param>
/// <param name="db">Database used to resolve node IDs to node objects.</param>
/// <exception cref="ArgumentException">Ways are not connected or do not close.</exception>
public static void AddWays(this Polygon<OSMNode> polygon, IList<OSMWay> ways, OSMDB db)
{
    if (ways.Count == 1)
    {
        // Check if the created polygon is closed
        if (ways[0].Nodes.Count > 0 && ways[0].Nodes.First() != ways[0].Nodes.Last())
        {
            throw new ArgumentException("Ways does not form a closed polygon");
        }
        // Skip the final node: it duplicates the first in a closed way.
        for (int i = 0; i < ways[0].Nodes.Count - 1; i++)
        {
            polygon.AddVertex(db.Nodes[ways[0].Nodes[i]]);
        }
    }
    else
    {
        // Seed the stitching cursor: pick the end of the LAST way that touches
        // the first way, so iteration starts oriented correctly.
        int lastVertexID = 0;
        if (ways[0].Nodes.First() == ways.Last().Nodes.First() || ways[0].Nodes.Last() == ways.Last().Nodes.First())
        {
            lastVertexID = ways.Last().Nodes.First();
        }
        else
        {
            lastVertexID = ways.Last().Nodes.Last();
        }
        for (int i = 0; i < ways.Count; i++)
        {
            List<int> verticesToAdd = new List<int>();
            // Checks the way orienatation and picks nodes in correct order
            if (lastVertexID == ways[i].Nodes[0])
            {
                verticesToAdd.AddRange(ways[i].Nodes);
            }
            else if (lastVertexID == ways[i].Nodes.Last())
            {
                verticesToAdd.AddRange(ways[i].Nodes.Reverse());
            }
            else
            {
                throw new ArgumentException("Can not create polygon, ways aren't connected");
            }
            // Skip the final node of each way — it is the first node of the next one.
            for (int ii = 0; ii < verticesToAdd.Count - 1; ii++)
            {
                polygon.AddVertex(db.Nodes[verticesToAdd[ii]]);
            }
            lastVertexID = verticesToAdd.Last();
        }
        // Check if the created polygon is closed
        if (polygon.VerticesCount > 0 && polygon.Vertices.First() != db.Nodes[lastVertexID])
        {
            throw new ArgumentException("Ways does not form a closed polygon");
        }
    }
}
/// <summary>
/// Yields the bottom profile with runs of equal depth collapsed: the first
/// point is always emitted, then only points whose depth differs from the most
/// recently emitted one. If the last emitted point does not reach the source's
/// final range, the final point is appended so the profile spans the full range.
/// </summary>
/// <param name="source">Profile points; must contain at least one element.</param>
static IEnumerable<BottomProfilePoint> Filter(IList<BottomProfilePoint> source)
{
    var curProfilePoint = source[0];
    yield return curProfilePoint;
    // NOTE: the lambda captures curProfilePoint by reference, so each comparison
    // is against the most recently yielded point, not against source[0].
    foreach (var point in source.Where(point => curProfilePoint.Depth != point.Depth))
    {
        curProfilePoint = point;
        yield return point;
    }
    // Ensure the final range is represented even when its depth was a duplicate.
    if (curProfilePoint.Range < source.Last().Range)
        yield return source.Last();
}
/// <summary>
/// Sorts the collected entries by Value in descending order and records the
/// lowest value (the last element after the descending sort) in _lowest.
/// </summary>
void SortCollectedList()
{
    // Materialize in one statement. The old code's intermediate
    // `_collectedList = null` was redundant: the deferred OrderByDescending
    // query had already captured the list reference, not the field.
    _collectedList = _collectedList
        .OrderByDescending(x => x.Value)
        .Cast<MapData>()
        .ToList();
    _lowest = _collectedList.Last().Value;
}
/// <summary>
/// Creates a property expression over <paramref name="obj"/> composed of the
/// given parts; the final part's value represents the expression's value, and
/// its ValueChanged event is forwarded as this expression's value change.
/// </summary>
/// <param name="obj">Root object the first part is resolved against.</param>
/// <param name="parts">Ordered, non-empty sequence of parts; no item may be null.</param>
/// <exception cref="ArgumentNullException">obj or parts is null.</exception>
/// <exception cref="ArgumentException">parts contains a null item or is empty.</exception>
protected PropertyExpression(object obj, IEnumerable<PropertyExpressionPart> parts)
{
    if (obj == null)
        throw new ArgumentNullException("obj");
    if (parts == null)
        throw new ArgumentNullException("parts");

    // Materialize once: the old code enumerated `parts` twice (a Cast().Any()
    // null scan followed by the List copy), which is wasteful and unsafe for
    // one-shot sequences.
    _parts = new List<PropertyExpressionPart>(parts);

    if (_parts.Any(item => item == null))
    {
        throw new ArgumentException(Resources.AnItemWasNull, "parts");
    }
    if (_parts.Count == 0)
        throw new ArgumentException(Resources.OnePartRequired, "parts");

    _finalPart = _parts.Last();

    //whenever the final part's value changes, that means the value of the expression as a whole has changed
    _finalPart.ValueChanged += delegate { OnValueChanged(); };

    //assign the initial object in the potential chain of objects resolved by the expression parts
    _parts[0].Object = obj;
}
/// <summary>
/// Matches every peak against the candidate fragments by mass. All matches are
/// recorded in hitsAll; hits keeps at most one match per peak — the one with
/// the lowest hydrogen penalty seen so far for that peak.
/// </summary>
/// <param name="fragments">Candidate fragment structures.</param>
/// <param name="peakList">Observed peaks to assign.</param>
/// <param name="mzabs">Absolute m/z tolerance.</param>
/// <param name="mzppm">Relative (ppm) m/z tolerance.</param>
public void AssignFragmentPeak(IList<IAtomContainer> fragments, IEnumerable<Peak> peakList, double mzabs, double mzppm)
{
    hits = new List<PeakMolPair>();
    hitsAll = new List<PeakMolPair>();
    foreach (var peak in peakList)
    {
        var haveFoundAMatch = false;
        foreach (var fragment in fragments)
        {
            //matched peak
            int hydrogensAdded;
            double matchedMass;
            double hydrogenPenalty;
            if (MatchByMass(fragment, peak.Mass, mzabs, mzppm, out matchedMass, out hydrogenPenalty, out hydrogensAdded))
            {
                // BondEnergy is stored as a string property; parse invariantly.
                var match = new PeakMolPair(fragment, peak, matchedMass, GetMolecularFormulaAsString(fragment), hydrogenPenalty, double.Parse((string)fragment.getProperty("BondEnergy"), CultureInfo.InvariantCulture), GetNeutralChange(fragment, hydrogensAdded));
                hitsAll.Add(match);
                // If we don't yet have a match, add it
                if (!haveFoundAMatch)
                {
                    hits.Add(match);
                    haveFoundAMatch = true;
                }
                // If we do have a match, replace it if this new match has a lower hydrogen penalty.
                // hits.Last() is always this peak's current best, because matches are appended per peak.
                else if (hydrogenPenalty < hits.Last().HydrogenPenalty)
                {
                    hits.RemoveAt(hits.Count - 1);
                    hits.Add(match);
                }
            }
        }
    }
}
/// <summary>
/// Removes the last point when it is too close to the first, so a closed ring
/// does not carry a duplicated closing vertex.
/// </summary>
/// <param name="points">Point list, trimmed in place.</param>
private void CheckFirstAndLastPoint(IList<Point> points)
{
    // ROBUSTNESS: Last()/First() throw InvalidOperationException on an empty
    // list; an empty list needs no trimming.
    if (points.Count == 0)
    {
        return;
    }
    if (this.DistanceIsTooSmall(points.Last(), points.First()))
    {
        points.RemoveAt(points.Count - 1);
    }
}
/// <summary>
/// Determines which wire to cut on a five-wire module, applying the manual's
/// rules in order. Requires the serial number's last-digit parity to be known.
/// </summary>
/// <param name="settings">Bomb settings; SerialLastDigit must be set.</param>
/// <param name="colors">Wire colors in physical order.</param>
/// <returns>1-based index of the wire to cut.</returns>
/// <exception cref="NeedSerialLastDigitException">Serial parity not yet known.</exception>
private int SolveFive(BombSettings settings, IList<Color> colors)
{
    if (settings.SerialLastDigit == Parity.NotSet)
    {
        throw new NeedSerialLastDigitException();
    }

    var redCount = colors.Count(c => c == Color.Red);
    var yellowCount = colors.Count(c => c == Color.Yellow);
    var blackCount = colors.Count(c => c == Color.Black);

    // If the last wire is black and the last digit of the serial number is odd, cut the fourth wire.
    if (colors.Last() == Color.Black && settings.SerialLastDigit == Parity.Odd)
    {
        return 4;
    }
    // Otherwise, if there is exactly one red wire and there is more than one yellow wire, cut the first wire.
    if (redCount == 1 && yellowCount > 1)
    {
        return 1;
    }
    // Otherwise, if there are no black wires, cut the second wire.
    if (blackCount == 0)
    {
        return 2;
    }
    // Otherwise, cut the first wire.
    return 1;
}
/// <summary>
/// Removes the last depth point when it is (nearly) coincident with the first,
/// so a closed contour does not carry a duplicated closing vertex.
/// </summary>
/// <param name="points">Point list, trimmed in place.</param>
private void CheckFirstAndLastPoint(IList<DepthPointEx> points)
{
    // ROBUSTNESS: Last()/First() throw InvalidOperationException on an empty
    // list; an empty list needs no trimming.
    if (points.Count == 0)
    {
        return;
    }
    if (PointsAreClose(points.Last(), points.First()))
    {
        points.RemoveAt(points.Count - 1);
    }
}
/// <summary>
/// Initializes the benchmark window: discovers all IController implementations
/// in this assembly via reflection, populates the controller and resolution
/// lists, and pre-selects defaults.
/// </summary>
public MainWindow()
{
    InitializeComponent();
    // Size to content once at load, then release so the user can resize.
    Loaded += (sender, e) => ClearValue(SizeToContentProperty);
    _allControllerViewModels = (from type in GetType().Assembly.GetTypes()
                                where !type.IsInterface && typeof(IController).IsAssignableFrom(type)
                                let viewModel = new ControllerViewModel((IController)Activator.CreateInstance(type))
                                orderby viewModel.SortIndex, viewModel.Library, viewModel.Description
                                select viewModel).ToList();
    _allControllerViewModels.First().IsChecked = true;
    ControllerGroups.ItemsSource = _allControllerViewModels.GroupBy(viewModel => viewModel.Library);
    _allResolutionViewModels = new[]
    {
        new ResolutionViewModel(800, 600, 50, 42),
        new ResolutionViewModel(1024, 768, 64, 54),
        new ResolutionViewModel(1280, 1024, 80, 73),
        new ResolutionViewModel(1440, 900, 90, 64),
    };
    // Pre-select the last (largest listed) resolution that fits the primary screen.
    // NOTE(review): Last(predicate) throws if none fits — assumes the screen is
    // at least 800x600; confirm that assumption holds for all targets.
    _allResolutionViewModels.Last(
        res => res.Width <= SystemParameters.PrimaryScreenWidth && res.Height <= SystemParameters.PrimaryScreenHeight).IsChecked = true;
    Resolutions.ItemsSource = _allResolutionViewModels;
    RunResults.ItemsSource = _runResults;
}
/// <summary>
/// Analyzes the classified tokens of a "from ... import ..." statement to decide
/// which completions apply: modules only (no 'import' yet), names from the named
/// package, a trailing '*', or none at all (trailing dot).
/// </summary>
/// <param name="tokens">Classified tokens of the statement; tokens[0] must be 'from'.</param>
/// <param name="span">Tracking span of the completion site.</param>
/// <param name="textBuffer">Buffer the completion applies to.</param>
/// <param name="options">Completion behavior options.</param>
public FromImportCompletionAnalysis(IList<ClassificationSpan> tokens, ITrackingSpan span, ITextBuffer textBuffer, CompletionOptions options)
    : base(span, textBuffer, options)
{
    Debug.Assert(tokens[0].Span.GetText() == "from");

    // Number of tokens preceding the 'import' keyword (== tokens.Count when absent).
    int beforeImportToken = tokens
        .TakeWhile(tok => !(tok.ClassificationType.IsOfType(PredefinedClassificationTypeNames.Keyword) && tok.Span.GetText() == "import"))
        .Count();

    bool lastIsDot = tokens.Last().ClassificationType.IsOfType(JPredefinedClassificationTypeNames.Dot);
    // No 'import' keyword yet: only module names make sense.
    _modulesOnly = beforeImportToken == tokens.Count;
    // 'import' seen but statement ends in a dot: nothing sensible to offer.
    _noCompletions = !_modulesOnly && lastIsDot;
    // 'import' is the final token: importing '*' is still a valid completion.
    _includeStar = beforeImportToken == tokens.Count - 1;

    if (beforeImportToken >= 2)
    {
        // If there are at least two tokens ('from' <name>) before the
        // 'import' token, use completions from that package.
        if (beforeImportToken < tokens.Count || lastIsDot)
        {
            _namespace = tokens
                .Take(beforeImportToken)
                .Where(tok => tok.ClassificationType.IsOfType(PredefinedClassificationTypeNames.Identifier))
                .Select(tok => tok.Span.GetText())
                .ToArray();
        }
        else
        {
            _importKeywordOnly = true;
        }
    }
}
/// <summary>
/// Splits a realization sequence into its pre-cycle prefix (Appendix) and the
/// repeating cycle (Cycle). The final value is assumed to be a repeat of the
/// value at the cycle's start.
/// </summary>
/// <param name="realization">Sequence ending at the first repeated (cycle-start) value.</param>
private void ParseRealization(IList<Double> realization)
{
    Double repeatedValue = realization.Last(); // last value it's the same as cycle start value
    // NOTE(review): IndexOf relies on exact Double equality — safe only if the
    // repeated value is copied bit-for-bit rather than recomputed; confirm with callers.
    Int32 cycleStartIndex = realization.IndexOf(repeatedValue);
    Appendix = realization.Take(cycleStartIndex).ToList();
    // The cycle excludes the trailing duplicate of its start value.
    Cycle = realization.Skip(cycleStartIndex).Take(realization.Count - cycleStartIndex - 1).ToList();
}
/// <summary>
/// Appends <paramref name="lastIndex"/> as the final sentence boundary, unless
/// the list is empty or already ends with that index.
/// </summary>
/// <param name="indices">Existing boundary indices, appended in place.</param>
/// <param name="lastIndex">Index of the text's end.</param>
protected void AddLastIndexAsSentenceBoundary(IList<int> indices, int lastIndex)
{
    if (indices.Count == 0)
    {
        return;
    }
    if (indices.Last() == lastIndex)
    {
        return;
    }
    indices.Add(lastIndex);
}
/// <summary>
/// Gets the display name of the stash at the given index, or the empty string
/// when the index is out of range or the UI elements are unavailable.
/// </summary>
/// <param name="index">Zero-based stash index.</param>
public string GetStashName(int index)
{
    if (index >= TotalStashes || index < 0)
    {
        return(string.Empty);
    }
    var viewAllStashPanelChildren = this.ViewAllStashPanelChildren;
    Element element = null;
    if (viewAllStashPanelChildren != null)
    {
        var element2 = viewAllStashPanelChildren.ElementAt(index);
        // BUGFIX: the old code dereferenced element2
        // (GetChildAtIndex(0).Children) BEFORE its null check, defeating the
        // check and risking a NullReferenceException. Dereference only after
        // confirming non-null, and guard GetChildAtIndex(0) as well.
        if (element2 != null)
        {
            IList<Element> children = element2.GetChildAtIndex(0)?.Children;
            element = children?.Last();
        }
    }
    return(element == null ? string.Empty : element.Text);
}
/// <summary>
/// Recursively extends <paramref name="chain"/> with connected, unused elements
/// until it reaches length <paramref name="n"/>. A dead-end chain of length
/// n-1 whose last element plus one is prime is counted in _nrOfChains.
/// </summary>
/// <param name="chain">Current (partial) chain; not modified — extensions are copies.</param>
/// <param name="n">Target chain length.</param>
private static void GenerateChains(IList<int> chain, int n)
{
    var count = chain.Count;
    if (count >= n) return;
    var last = chain.Last();
    // Candidate successors: neighbors of `last` not already used in this chain.
    var nextElems = _dict[last].Where(k => !chain.Contains(k)).ToList();
    if (nextElems.Any())
    {
        // (The previous `count < n` condition here was redundant — the early
        // return above already guarantees it.)
        foreach (var next in nextElems)
        {
            var deeper = chain.ToList();
            deeper.Add(next);
            GenerateChains(deeper, n);
        }
    }
    else if (IsPrime(last + 1) && count == n - 1)
    {
        _nrOfChains++;
    }
}
/// <summary>
/// Verifies skip-past pagination over the seeded batch: with pages of 10 and 25
/// rows expected, the loop should stop after exactly 3 pages with no over-run.
/// </summary>
public void PaginateWithSkipPast_RowCount()
{
    var repository = GetScenario();
    var batchKey = Guid.NewGuid().ToString();
    var originals = BuildEmployees(RowCount, batchKey);
    repository.InsertBatch(originals);
    var expectedCount = 25; //pretend you asked the database for this number
    var count = 0;
    var page = 0;
    const int take = 10;
    IList<TEmployeeSimple>? lastSet = null;
    do
    {
        // The last row of the previous page is the skip-past cursor (null on the first page).
        var skipPast = lastSet?.Last();
        lastSet = repository.PaginateWithSkipPast(batchKey, skipPast, take);
        count += lastSet.Count;
        page += 1;
    } while (count < expectedCount);

    Assert.AreEqual(25, count);
    Assert.AreEqual(3, page); //no over-run
}
/// <summary>
/// Verifies skip-past pagination when looping until an empty page: 25 rows in
/// pages of 10 yields 4 requests — three with data plus one empty over-run page.
/// </summary>
public void PaginateWithSkipPast_Overrun()
{
    var repository = GetScenario();
    var batchKey = Guid.NewGuid().ToString();
    var originals = BuildEmployees(RowCount, batchKey);
    repository.InsertBatch(originals);
    var count = 0;
    var page = 0;
    const int take = 10;
    IList<TEmployeeSimple>? lastSet = null;
    do
    {
        // The last row of the previous page is the skip-past cursor (null on the first page).
        var skipPast = lastSet?.Last();
        lastSet = repository.PaginateWithSkipPast(batchKey, skipPast, take);
        count += lastSet.Count;
        page += 1;
    } while (lastSet.Count > 0);

    Assert.AreEqual(25, count);
    Assert.AreEqual(4, page); //one page of over-run
}
/// <summary>
/// Creates a path from an ordered list of points.
/// </summary>
/// <param name="points">At least MINIMUM_AMMOUNT_OF_POINTS non-null points; first and last must differ.</param>
/// <exception cref="ArgumentNullException">points is null.</exception>
/// <exception cref="ArgumentException">A point is null, there are too few points, or the first and last points coincide.</exception>
public Path(IList<SimplePoint> points)
{
    if (points == null)
    {
        throw new ArgumentNullException("points");
    }
    // BUGFIX: ArgumentException's constructor is (message, paramName) — the old
    // code passed them in the opposite order, so thrown exceptions carried
    // "points" as the message and the real message as the parameter name.
    if (points.Any(x => x == null))
    {
        throw new ArgumentException(ErrorMessages.msgPath_Error_NoNullPointAllowedInTheCollection, "points");
    }
    if (points.Count < MINIMUM_AMMOUNT_OF_POINTS)
    {
        throw new ArgumentException(ErrorMessages.msgPath_Error_AtLeastTwoPointsInTheCollection, "points");
    }
    if (points.First().Equals(points.Last()))
    {
        throw new ArgumentException(ErrorMessages.msgPath_Error_FirstAndLastPointInTheCollectionShouldNotBeTheSame, "points");
    }
    this._points = points;
}
/// <summary>
/// Returns the edges along the given vertex path, looked up in the edge matrix.
/// A two-vertex path that starts and ends at the same vertex yields no edges.
/// </summary>
/// <param name="items">Ordered vertices of the path.</param>
public IList<IEdge> GetEdgePath(IList<IVertex> items)
{
    var result = new List<IEdge>();
    // BUGFIX: use short-circuiting && instead of bitwise & — with `&` both
    // operands were always evaluated, so First()/Last() threw on an empty list
    // even though the count check had already failed.
    if (items.Count == 2 && items.First() == items.Last())
        return result;
    for (var i = 0; i < items.Count - 1; i++)
    {
        result.Add(_edges[items[i].Number, items[i + 1].Number]);
    }
    return result;
}
/// <summary>
/// Builds a penalty processor over the given jams and the combined roster of
/// home and away players, keyed by player ID. On an ID collision the away
/// player's entry wins.
/// </summary>
public PenaltyProcessor(IList<Jam> jams, Dictionary<string, Player> homePlayers, Dictionary<string, Player> awayPlayers)
{
    _jams = jams;
    _lastJam = jams.Last();

    // Merge both rosters into a single ID-keyed lookup.
    var roster = homePlayers.Values.ToDictionary(p => p.ID);
    foreach (var awayPlayer in awayPlayers.Values)
    {
        roster[awayPlayer.ID] = awayPlayer;
    }
    _players = roster;
}
/// <summary>
/// Builds an autofill Dataset that launches the main activity so the user can
/// pick a vault item; fillable fields receive a placeholder value that is
/// replaced once the real item is chosen. Returns null when the fields are not
/// fillable as login, card or identity.
/// </summary>
/// <param name="context">Android context used for intents and presentations.</param>
/// <param name="fields">Detected autofill fields.</param>
/// <param name="uri">URI of the app/site being filled.</param>
/// <param name="locked">Whether the vault is currently locked (changes subtitles).</param>
/// <param name="inlinePresentationSpecs">Optional inline presentation specs; the last one is used.</param>
public static Dataset BuildVaultDataset(Context context, FieldCollection fields, string uri, bool locked, IList<InlinePresentationSpec> inlinePresentationSpecs = null)
{
    var intent = new Intent(context, typeof(MainActivity));
    intent.PutExtra("autofillFramework", true);
    if (fields.FillableForLogin)
    {
        intent.PutExtra("autofillFrameworkFillType", (int)CipherType.Login);
    }
    else if (fields.FillableForCard)
    {
        intent.PutExtra("autofillFrameworkFillType", (int)CipherType.Card);
    }
    else if (fields.FillableForIdentity)
    {
        intent.PutExtra("autofillFrameworkFillType", (int)CipherType.Identity);
    }
    else
    {
        return(null);
    }
    intent.PutExtra("autofillFrameworkUri", uri);
    // Unique request code per pending intent so earlier ones are not reused.
    var pendingIntent = PendingIntent.GetActivity(context, ++_pendingIntentId, intent, PendingIntentFlags.CancelCurrent);

    var overlayPresentation = BuildOverlayPresentation(
        AppResources.AutofillWithBitwarden,
        locked ? AppResources.VaultIsLocked : AppResources.GoToMyVault,
        Resource.Drawable.icon,
        context);

    // NOTE(review): uses the LAST inline presentation spec — confirm this is the
    // intended catch-all spec per the inline autofill request contract.
    var inlinePresentation = BuildInlinePresentation(
        inlinePresentationSpecs?.Last(),
        AppResources.Bitwarden,
        locked ? AppResources.VaultIsLocked : AppResources.MyVault,
        Resource.Drawable.icon,
        pendingIntent,
        context);

    var datasetBuilder = new Dataset.Builder(overlayPresentation);
    if (inlinePresentation != null)
    {
        datasetBuilder.SetInlinePresentation(inlinePresentation);
    }
    datasetBuilder.SetAuthentication(pendingIntent?.IntentSender);

    // Dataset must have a value set. We will reset this in the main activity when the real item is chosen.
    foreach (var autofillId in fields.AutofillIds)
    {
        datasetBuilder.SetValue(autofillId, AutofillValue.ForText("PLACEHOLDER"));
    }
    return(datasetBuilder.Build());
}
/// <summary>
/// Creates the (initially hidden) dark-blue backing panel for a player's hand,
/// positioned 10px up-left of the first card holder's left edge and the last
/// card holder's top edge.
/// </summary>
/// <param name="cardHolders">Card holder controls; must be non-empty.</param>
private static Panel GetPlayerPanel(IList<PictureBox> cardHolders)
{
    var location = new Point(cardHolders.First().Left - 10, cardHolders.Last().Top - 10);
    return new Panel
    {
        Location = location,
        BackColor = Color.DarkBlue,
        Height = 150,
        Width = 180,
        Visible = false,
    };
}
/// <summary>
/// Computes a theta value for every cardinality from the largest down to the
/// smallest, inclusive. Assumes the input is sorted in descending order
/// (first element = max, last element = min).
/// </summary>
/// <param name="sortedCardinalities">Cardinalities sorted descending.</param>
private IList<int> GetThetas(IList<int> sortedCardinalities)
{
    var maxCardinality = sortedCardinalities.First();
    var minCardinality = sortedCardinalities.Last();

    var thetas = new List<int>();
    for (var cardinality = maxCardinality; cardinality >= minCardinality; cardinality--)
    {
        thetas.Add(this.GetTheta(cardinality, sortedCardinalities));
    }
    return thetas;
}
/// <summary>
/// Logs delivery of a batch of messages, including the sequence token of the
/// last message delivered. No-op for a null or empty batch.
/// </summary>
/// <param name="messages">Delivered batch containers; may be null or empty.</param>
public Task MessagesDeliveredAsync(IList <IBatchContainer> messages)
{
    var count = messages?.Count ?? 0;
    if (count == 0)
    {
        return(TaskDone.Done);
    }
    // After the guard above, messages is non-null and non-empty, so the old
    // `messages?.Count != 0` ternary was always true and its "--" fallback was
    // unreachable. Apply the fallback where it was evidently intended: when the
    // last container or its token is missing.
    var lastToken = messages.Last()?.SequenceToken?.ToString() ?? "--";
    _logger.AutoVerbose($"Delivered {count}, last one has token {lastToken}");
    return(TaskDone.Done);
}
/// <summary>
/// Formats a list of years for display: one year as-is, two years as "A in B",
/// and three or more as "A, B, ... in Z".
/// </summary>
/// <param name="list">Years to format; must be non-empty.</param>
private string GetStringFromYearsList(IList<short> list)
{
    switch (list.Count)
    {
        case 1:
            return list[0].ToString();
        case 2:
            return string.Join(" ", list[0], "in", list[1]);
        default:
            // Join all but the last year with commas, then append "in <last>".
            var leadingYears = string.Join(", ", list.Select(l => l.ToString()).ToArray<string>(), 0, list.Count - 1);
            return string.Join(" ", leadingYears, "in", list.Last());
    }
}
/// <summary>
/// Returns the gradient color at the given offset by linearly interpolating
/// between the nearest gradient stops on either side of it.
/// </summary>
/// <param name="stops">Gradient stops; must be non-empty. stops[0] seeds the lower bound, the last stop the upper bound.</param>
/// <param name="offset">Offset (typically 0..1) to sample.</param>
public static Color GetColorAtOffset(IList<GradientStop> stops, double offset)
{
    // Find the closest stops below and above the requested offset.
    GradientStop s1 = stops[0], s2 = stops.Last();
    foreach (var item in stops)
    {
        if (item.Offset < offset && item.Offset > s1.Offset) s1 = item;
        if (item.Offset > offset && item.Offset < s2.Offset) s2 = item;
    }
    // BUGFIX: the old code averaged the two stop colors regardless of where the
    // offset fell between them, so every offset in a segment produced the same
    // midpoint color. Weight each channel by the offset's relative position.
    var span = s2.Offset - s1.Offset;
    var t = span <= 0 ? 0.0 : (offset - s1.Offset) / span;
    return Color.FromArgb(
        (byte)(s1.Color.A + ((s2.Color.A - s1.Color.A) * t)),
        (byte)(s1.Color.R + ((s2.Color.R - s1.Color.R) * t)),
        (byte)(s1.Color.G + ((s2.Color.G - s1.Color.G) * t)),
        (byte)(s1.Color.B + ((s2.Color.B - s1.Color.B) * t))
    );
}
/// <summary>
/// Lazily creates the four merge views (left/parent/right/output) once both the
/// document and the visual tree are available, and configures the shared
/// vertical scroll range from their combined boundaries.
/// </summary>
private void TryInitializeViews()
{
    if (_doc != null && this.IsLoaded)
    {
        _leftView = new MergeView(editLeft).SetDocument(_doc, MergeLocation.Left);
        _parentView = new MergeView(editParent).SetDocument(_doc, MergeLocation.Parent);
        _rightView = new MergeView(editRight).SetDocument(_doc, MergeLocation.Right);
        _outputView = new MergeView(editOutput).SetDocument(_doc, MergeLocation.Output);
        _scrollBoundaries = MergeView.GetScrollBoundaries(_leftView, _parentView, _rightView, _outputView);
        _views = new MergeView[] { _leftView, _parentView, _rightView, _outputView };
        // The scrollbar spans from 0 to the furthest boundary across all views.
        vertScroll.Minimum = 0.0;
        vertScroll.Maximum = _scrollBoundaries.Last();
    }
}
/// <summary>
/// Depth-first route expansion: when the current path's last edge reaches the
/// destination the path is recorded; otherwise every feasible next edge is
/// appended (on a copy) and explored recursively.
/// </summary>
/// <param name="currentPath">Edges traversed so far; must be non-empty.</param>
/// <param name="destinationNode">Node at which a path is complete.</param>
/// <param name="edges">All available transit edges.</param>
/// <param name="constraints">Constraints applied when selecting next edges.</param>
/// <param name="paths">Accumulator for completed paths.</param>
private static void BuildRoute(IList<TransitEdge> currentPath, string destinationNode, IEnumerable<TransitEdge> edges, Constraints constraints, ICollection<TransitPath> paths)
{
    var lastEdge = currentPath.Last();

    // Reached the destination: record the completed path and stop recursing.
    if (lastEdge.To == destinationNode)
    {
        paths.Add(new TransitPath(currentPath));
        return;
    }

    foreach (var candidate in FindPossibleEdges(lastEdge.To, lastEdge.ToDate, edges, constraints))
    {
        var extendedPath = new List<TransitEdge>(currentPath) { candidate };
        BuildRoute(extendedPath, destinationNode, edges, constraints, paths);
    }
}
/// <summary>
/// Estimates the seconds remaining until the endpoint's critical-time SLA is
/// breached, by linearly extrapolating the critical-time growth rate over the
/// snapshot window. Returns double.MaxValue when no breach is projected and 0
/// when the SLA is already breached.
/// </summary>
/// <param name="snapshots">Chronological data points; at least 2 required.</param>
double CalculateTimeToSLABreach(IList<DataPoint> snapshots)
{
    //need at least 2 data points to be able to calculate
    if (snapshots.Count < 2)
    {
        return double.MaxValue;
    }
    DataPoint previous = null;
    var criticalTimeDelta = TimeSpan.Zero;
    // Sum of successive critical-time differences (telescopes to last - first).
    foreach (var current in snapshots)
    {
        if (previous != null)
        {
            criticalTimeDelta += current.CriticalTime - previous.CriticalTime;
        }
        previous = current;
    }
    // Flat or shrinking critical time: no breach projected.
    if (criticalTimeDelta.TotalSeconds <= 0.0)
    {
        return double.MaxValue;
    }
    var elapsedTime = snapshots.Last().OccurredAt - snapshots.First().OccurredAt;
    if (elapsedTime.TotalSeconds <= 0.0)
    {
        return double.MaxValue;
    }
    var lastKnownCriticalTime = snapshots.Last().CriticalTime.TotalSeconds;
    var criticalTimeDeltaPerSecond = criticalTimeDelta.TotalSeconds/elapsedTime.TotalSeconds;
    // Linear extrapolation: remaining headroom divided by growth rate.
    var secondsToSLABreach = (endpointSLA.TotalSeconds - lastKnownCriticalTime)/criticalTimeDeltaPerSecond;
    // Already past the SLA.
    if (secondsToSLABreach < 0.0)
    {
        return 0.0;
    }
    return secondsToSLABreach;
}
/// <summary>
/// Joins the elements with ", " into a single display string.
/// Returns the empty string for a null or empty list.
/// </summary>
/// <param name="elements">Items to join; may be null.</param>
/// <returns>Comma-separated concatenation of the elements.</returns>
public static string Construct(IList<string> elements)
{
    if (elements == null || elements.Count == 0)
    {
        return "";
    }
    // BUGFIX: the old loop compared each element to elements.Last() to decide
    // whether to append a separator, so ANY element equal to the last one
    // (i.e. duplicates) silently lost its ", ". string.Join has no such problem.
    return string.Join(", ", elements);
}
/// <summary>
/// Searches a data stream for a byte subsequence.
/// </summary>
/// <remarks>
/// Simplified variant of the Boyer–Moore algorithm: bytes stream through a
/// circular window, and a full comparison is attempted only when the current
/// byte matches the template's final byte.
/// </remarks>
/// <param name="file">Data stream.</param>
/// <param name="template">Subsequence to search for.</param>
/// <returns>true if found.</returns>
public static bool searchInFile(Stream file, IList<byte> template)
{
    // incorrect input
    if (file == null || template == null || template.Count == 0)
        return false;
    // Sliding window holding the last template.Count bytes read.
    CircularQueue<byte> buff = new CircularQueue<byte>(template.Count);
    for (int t = file.ReadByte(); t != -1; t = file.ReadByte())
    {
        buff.push((byte)t);
        // Cheap pre-check: compare the full window only when the newest byte
        // matches the template's last byte.
        if (template.Last().Equals((byte)t))
        {
            if (buff.cmp(template))
                return true;
        }
    }
    return false;
}
/// <summary>
/// Determines which wire to cut on a four-wire module, applying the manual's
/// rules in order. Requires the serial number's last-digit parity to be known.
/// </summary>
/// <param name="settings">Bomb settings; SerialLastDigit must be set.</param>
/// <param name="colors">Wire colors in physical order.</param>
/// <returns>1-based index of the wire to cut.</returns>
/// <exception cref="NeedSerialLastDigitException">Serial parity not yet known.</exception>
private int SolveFour(BombSettings settings, IList<Color> colors)
{
    if (settings.SerialLastDigit == Parity.NotSet)
    {
        throw new NeedSerialLastDigitException();
    }

    var redCount = colors.Count(c => c == Color.Red);
    var yellowCount = colors.Count(c => c == Color.Yellow);
    var blueCount = colors.Count(c => c == Color.Blue);

    // If there is more than one red wire and the last digit of the serial number is odd, cut the last red wire.
    if (redCount > 1 && settings.SerialLastDigit == Parity.Odd)
    {
        var lastRedIndex = colors
            .Select((wireColor, position) => new KeyValuePair<int, Color>(position, wireColor))
            .Where(pair => pair.Value == Color.Red)
            .Last()
            .Key;
        return lastRedIndex + 1;
    }
    // Otherwise, if the last wire is yellow and there are no red wires, cut the first wire.
    if (colors.Last() == Color.Yellow && redCount == 0)
    {
        return 1;
    }
    // Otherwise, if there is exactly one blue wire, cut the first wire.
    if (blueCount == 1)
    {
        return 1;
    }
    // Otherwise, if there is more than one yellow wire, cut the last wire.
    if (yellowCount > 1)
    {
        return 4;
    }
    // Otherwise, cut the second wire.
    return 2;
}
// Asserts that the most recently recorded call happened at the given time.
// (Throws InvalidOperationException if no call has been recorded yet.)
public void AssertLastCallTime(DateTime time) => Assert.Equal(time, _callTimes.Last());
/// <summary>
/// Returns the 1-based rank of <paramref name="score"/> within
/// <paramref name="scores"/>, which is assumed sorted in descending order.
/// Uses a hand-rolled binary search: halves the index down when the score
/// exceeds the probe, bisects upward when below, until the window is one slot.
/// </summary>
/// <param name="score">Score to rank.</param>
/// <param name="scores">Existing scores, highest first; must be non-empty.</param>
private static int GetRank(int score, IList<int> scores)
{
    // Quick checks to save CPU time
    if (score >= scores.First())
    {
        return(1);
    }
    if (score < scores.Last())
    {
        return(scores.Count + 1);
    }
    int start = scores.Count / 2;
    int end = scores.Count;
    for (; ;)
    {
        if (score == scores[start])
        {
            return(start + 1);
        }
        if (score > scores[start])
        {
            // Overshot (probe too small): pull the window's end down and halve start.
            while (score > scores[start])
            {
                end = start;
                start >>= 1;
            }
        }
        if (score < scores[start])
        {
            // Undershot (probe too large): bisect upward toward end until no progress.
            while (score < scores[start])
            {
                var lastStart = start;
                start += (end - start) / 2;
                if (start == lastStart)
                {
                    break;
                }
            }
        }
        // Window narrowed to a single slot: rank is decided here.
        if (end - start == 1)
        {
            if (score < scores[start])
            {
                return(start + 2);
            }
            if (score >= scores[start])
            {
                return(start + 1);
            }
        }
    }
    // NOTE(review): unreachable — every exit from the infinite loop above returns.
    return(start + 1);
}
/// <summary>
/// Return visibility report from first point to last point. We assume that all points are aligned.
/// WARNING: those calculations are not spherical (yet) and are not accurate for long distances.
/// <see cref="IntervisibilityMetrics"/>
/// </summary>
/// <param name="points">Input list of points, visibility is calculated for first and last points (ie: are they visible or is there a relief standing in between)</param>
/// <param name="visibilityCheck">When true (and the endpoints are distinct), obstacles along the sight line are detected.</param>
/// <param name="sourceVerticalOffset">Extra height added to the observer (first point).</param>
/// <param name="noDataValue">Elevation value that marks a void/no-data sample.</param>
/// <returns><see cref="IntervisibilityMetrics"/> object</returns>
internal static IntervisibilityMetrics ComputeVisibilityMetrics(IList<GeoPoint> points, bool visibilityCheck = true, double sourceVerticalOffset = 0d, double? noDataValue = null)
{
    IntervisibilityMetrics metrics = new IntervisibilityMetrics();
    if (points.Count == 0)
    {
        return(metrics);
    }
    GeoPoint A = points.First(), B = points.Last();
    double hA = A.Elevation ?? 0d, hB = B.Elevation ?? 0d;
    hA += sourceVerticalOffset;
    double AB = A.DistanceTo(B);
    // Coincident endpoints make the sight-line slope undefined; skip the check.
    visibilityCheck = visibilityCheck && (AB > double.Epsilon);
    // Normalize so A is always the higher endpoint; the sight line runs from A down to B.
    if (hA < hB)
    {
        MathHelper.Swap(ref A, ref B);
        MathHelper.Swap(ref hA, ref hB);
    }
    double total = 0, minElevation = double.MaxValue, maxElevation = double.MinValue, totalClimb = 0, totalDescent = 0;
    GeoPoint firstPoint = points[0];
    firstPoint.DistanceFromOriginMeters = 0; // force at 0. If null, ignored in json responses
    double lastElevation = firstPoint.Elevation ?? 0;
    IntervisibilityObstacle obstacle = null;
    double lastPeakElevation = 0;
    int numNoDataPoints = 0;
    for (int i = 1; i < points.Count; i++)
    {
        #region metrics
        GeoPoint curPoint = points[i];
        double v_dist = DistanceTo(curPoint, points[i - 1]);
        total += v_dist;
        curPoint.DistanceFromOriginMeters = total;
        minElevation = Math.Min(minElevation, curPoint.Elevation ?? double.MaxValue);
        maxElevation = Math.Max(maxElevation, curPoint.Elevation ?? double.MinValue);
        numNoDataPoints += curPoint.Elevation == noDataValue ? 1 : 0;
        // Voids inherit the previous elevation so climb/descent totals stay continuous.
        double currentElevation = curPoint.Elevation ?? lastElevation;
        double diff = currentElevation - lastElevation;
        if (diff > 0)
        {
            totalClimb += diff;
        }
        else
        {
            totalDescent += diff;
        }
        #endregion
        #region visibility checks
        // Visibility check
        // If obstacle hit, add it and
        if (visibilityCheck)
        {
            double distToLowestPoint = curPoint.DistanceTo(B);
            // Elevation of the A->B sight line above this point (linear interpolation).
            double visibilityElevationThreshold = (distToLowestPoint * (hA - hB)) / AB + hB;
            if (currentElevation >= visibilityElevationThreshold)
            {
                if (obstacle == null)
                {
                    // Entering a new obstacle: remember its entry and provisional peak.
                    obstacle = new IntervisibilityObstacle(curPoint, visibilityElevationThreshold);
                    lastPeakElevation = currentElevation;
                    obstacle.PeakPoint = curPoint;
                }
                else
                {
                    // still inside obstacle, find peak
                    if (currentElevation > lastPeakElevation)
                    {
                        lastPeakElevation = currentElevation;
                        obstacle.PeakPoint = curPoint;
                    }
                }
            }
            else
            {
                if (obstacle != null) // out of obstacle, register it
                {
                    obstacle.ExitPoint = curPoint;
                    metrics.AddObstacle(obstacle);
                    obstacle = null;
                }
            }
        }
        if (i == points.Count - 1 && obstacle != null)
        {
            // Edge case: last point is exit point. We still have an active obstacle instance
            // If obstacle entry is curPoint, this is the same point and this is not an obstacle
            if (!obstacle.EntryPoint.Equals(curPoint))
            {
                obstacle.ExitPoint = curPoint;
                metrics.AddObstacle(obstacle);
                obstacle = null;
            }
        }
        #endregion
        lastElevation = currentElevation;
    }
    metrics.Climb = totalClimb;
    metrics.Descent = totalDescent;
    metrics.NumPoints = points.Count;
    metrics.Distance = total;
    metrics.MinElevation = minElevation;
    metrics.MaxElevation = maxElevation;
    metrics.HasVoids = numNoDataPoints > 0;
    metrics.NumVoidPoints = numNoDataPoints;
    return(metrics);
}
/// <summary> /// Step 2 of 3 - Convert tokens to commands (Abstract Syntax Tree) /// </summary> /// <param name="tokens"></param> /// <returns></returns> public static CSProgram ConvertTokensToCommands(IList <Token> tokens) { var csProgram = new CSProgram(); var currentProgramAddr = Constants.BASE_ADDR_PROGRAM; var currentVariableAddr = Constants.BASE_ADDR_VARIABLES; var bracesOpened = 0; Command parentCommand = null; var currentCommandTokens = new List <Token>(); var lastToken = tokens.Last(); foreach (var token in tokens) { currentCommandTokens.Add(token); if (bracesOpened == 0 && token is CloseBracesToken) { throw new UnmatchingBracesException(); } else if (bracesOpened > 0 && token is CloseBracesToken) { parentCommand = null; bracesOpened--; currentCommandTokens.Clear(); } else if (currentCommandTokens.Count == 7 && currentCommandTokens[0] is KeywordToken && currentCommandTokens[1] is OpenParenthesisToken && currentCommandTokens[2] is IdentifierToken && currentCommandTokens[3] is ComparisonToken && currentCommandTokens[4] is IdentifierToken && currentCommandTokens[5] is CloseParenthesisToken && currentCommandTokens[6] is OpenBracesToken && currentCommandTokens[0].Text == "if" ) { // Test whether is a If Instruction var variableLeftOperandName = currentCommandTokens[2].Text; var variableRightOperandName = currentCommandTokens[4].Text; Variable variableLeftOperand = GetVariableByName(csProgram, variableLeftOperandName); Variable variableRightOperand = GetVariableByName(csProgram, variableRightOperandName); var command = new IfInstruction(); command.ParentCommand = parentCommand; command.CsProgram = csProgram; command.Tokens = new List <Token>(currentCommandTokens); command.BaseInstructionAddress = currentProgramAddr; command.VariableLeftOperand = variableLeftOperand; command.VariableRightOperand = variableRightOperand; // add bytes of program //var bytesOfCommand = command.MachineCode(); //currentProgramAddr = AddBytesOfCommand(machineCodeProgram, 
currentProgramAddr, bytesOfCommand); parentCommand = command; bracesOpened++; csProgram.Commands.Add(command); currentCommandTokens.Clear(); } else if (token is SemicolonToken || token == lastToken) { // Test whether is a Var Definition Instruction if (VarDefinitionInstruction.CheckFormat(currentCommandTokens)) // TODO: make all checks like this one { var variableName = currentCommandTokens[1].Text; var variableValue = currentCommandTokens[3].Text; if (csProgram.Variables.Where(x => x.Name == variableName).Count() > 0) { throw new VariableAlreadyDefinedException(variableName); } if (int.Parse(variableValue) > 255) { throw new VariableOutsideOfRangeException(variableName); } var command = new VarDefinitionInstruction(); command.ParentCommand = parentCommand; command.CsProgram = csProgram; command.Tokens = new List <Token>(currentCommandTokens); // add bytes of program //var bytesOfCommand = command.MachineCode(); //currentProgramAddr = AddBytesOfCommand(machineCodeProgram, currentProgramAddr, bytesOfCommand); var variable = new Variable(); variable.Name = variableName; variable.Address = currentVariableAddr; variable.VarType = EnumVarType.Byte; csProgram.Variables.Add(variable); currentVariableAddr++; // TODO: check type of var and increment it according to size of the type command.Variable = variable; //TODO: all instructions must update currentProgramAddr (or not)? 
// Update currentProgramAddr currentProgramAddr += command.MachineCode().Count; csProgram.Commands.Add(command); // NO intruction change memory var area in compile-time //machineCodeProgram.Bytes[this.GetNextVariableAddress()] = Convert.ToByte(variableValue); } // Test whether is an Atribution Instruction else if (currentCommandTokens.Count == 4 && currentCommandTokens[0] is IdentifierToken && currentCommandTokens[1] is EqualToken && currentCommandTokens[2] is OperandToken && currentCommandTokens[3] is SemicolonToken) { var variableDestinyName = currentCommandTokens[0].Text; var variableDestiny = GetVariableByName(csProgram, variableDestinyName); if (currentCommandTokens[2] is LiteralToken) { var literalValue = currentCommandTokens[2].Text; if (int.Parse(literalValue) > 255) { throw new VariableOutsideOfRangeException(variableDestinyName); } var command = new AtributionFromLiteralInstruction(); command.ParentCommand = parentCommand; command.CsProgram = csProgram; command.Tokens = new List <Token>(currentCommandTokens); command.VariableResult = variableDestiny; // add bytes of program //var bytesOfCommand = command.MachineCode(); //currentProgramAddr = AddBytesOfCommand(machineCodeProgram, currentProgramAddr, bytesOfCommand); //TODO: extract method if (parentCommand == null) { csProgram.Commands.Add(command); } else { ((ComplexCommand)parentCommand).InnerCommands.Add(command); } // Atribution intruction DON'T change memory var area! 
//machineCodeProgram.Bytes[this.GetNextVariableAddress()] = Convert.ToByte(literalValue); } else if (currentCommandTokens[2] is IdentifierToken) { var variableSourceName = currentCommandTokens[2].Text; var variableSource = GetVariableByName(csProgram, variableSourceName); var command = new AtributionFromVarInstruction(); command.ParentCommand = parentCommand; command.CsProgram = csProgram; command.Tokens = new List <Token>(currentCommandTokens); command.VariableSource = variableSource; command.VariableDestiny = variableDestiny; // add bytes of program //var bytesOfCommand = command.MachineCode(); //currentProgramAddr = AddBytesOfCommand(machineCodeProgram, currentProgramAddr, bytesOfCommand); csProgram.Commands.Add(command); // Atribution intruction DON'T change memory var area! //machineCodeProgram.Bytes[this.GetNextVariableAddress()] = Convert.ToByte(literalValue); } } // Test whether is an Increment Instruction else if (currentCommandTokens.Count == 4 && currentCommandTokens[0] is IdentifierToken && currentCommandTokens[1] is ArithmeticSignalToken && currentCommandTokens[2] is ArithmeticSignalToken && currentCommandTokens[3] is SemicolonToken) { var variableName = currentCommandTokens[0].Text; var arithmeticSignal1 = currentCommandTokens[1].Text; var arithmeticSignal2 = currentCommandTokens[2].Text; var variable = GetVariableByName(csProgram, variableName); var command = new IncrementInstruction(); command.ParentCommand = parentCommand; command.CsProgram = csProgram; command.Tokens = new List <Token>(currentCommandTokens); command.VariableOperand = variable; command.BaseInstructionAddress = currentProgramAddr; if (arithmeticSignal1 == "+" && arithmeticSignal2 == "+") { command.IncrementOperation = EnumIncrementOperation.Increment; } else if (arithmeticSignal1 == "-" && arithmeticSignal2 == "-") { command.IncrementOperation = EnumIncrementOperation.Decrement; } // add bytes of program //var bytesOfCommand = command.MachineCode(); //currentProgramAddr = 
AddBytesOfCommand(machineCodeProgram, currentProgramAddr, bytesOfCommand); csProgram.Commands.Add(command); } // Test whether is an Arithmetic Instruction else if (currentCommandTokens.Count == 6 && currentCommandTokens[0] is IdentifierToken && currentCommandTokens[1] is EqualToken && currentCommandTokens[2] is OperandToken && currentCommandTokens[3] is ArithmeticSignalToken && currentCommandTokens[4] is OperandToken && currentCommandTokens[5] is SemicolonToken) { var variableDestinyName = currentCommandTokens[0].Text; var variableDestiny = GetVariableByName(csProgram, variableDestinyName); if (currentCommandTokens[2] is IdentifierToken && currentCommandTokens[4] is LiteralToken) { //TODO: //var literalValue = currentCommandTokens[2].Text; //if (int.Parse(literalValue) > 255) //{ // throw new VariableOutsideOfRangeException(variableDestinyName); //} //var command = new AtributionFromLiteralInstruction(); //command.csProgram = this; //command.Tokens = new List<Token>(currentCommandTokens); //command.VariableResult = variableDestiny; //// add bytes of program //var bytesOfCommand = command.MachineCode(); //currentProgramAddr = AddBytesOfProgram(machineCodeProgram, currentProgramAddr, bytesOfCommand); //this.Commands.Add(command); //// Atribution intruction DON'T change memory var area! 
////machineCodeProgram.Bytes[this.GetNextVariableAddress()] = Convert.ToByte(literalValue); } else if (currentCommandTokens[2] is IdentifierToken && currentCommandTokens[4] is IdentifierToken) { var variableLeftOperandName = currentCommandTokens[2].Text; var variableRightOperandName = currentCommandTokens[4].Text; var arithmeticOperation = currentCommandTokens[3].Text; var variableLeftOperand = GetVariableByName(csProgram, variableLeftOperandName); var variableRightOperand = GetVariableByName(csProgram, variableRightOperandName); var command = new ArithmeticInstruction(); command.ParentCommand = parentCommand; command.CsProgram = csProgram; command.Tokens = new List <Token>(currentCommandTokens); command.VariableLeftOperand = variableLeftOperand; command.VariableRightOperand = variableRightOperand; command.VariableDestiny = variableDestiny; switch (arithmeticOperation) { case "+": command.ArithmeticOperation = EnumArithmeticOperation.Addition; break; case "-": command.ArithmeticOperation = EnumArithmeticOperation.Subtraction; break; default: break; } // add bytes of program //var bytesOfCommand = command.MachineCode(); //currentProgramAddr = AddBytesOfCommand(machineCodeProgram, currentProgramAddr, bytesOfCommand); csProgram.Commands.Add(command); } } // Test whether is an Command Instruction else if (currentCommandTokens.Count == 7 && currentCommandTokens[0] is CommandToken && currentCommandTokens[1] is OpenParenthesisToken && currentCommandTokens[2] is LiteralToken && currentCommandTokens[3] is CommaToken && currentCommandTokens[4] is IdentifierToken && currentCommandTokens[5] is CloseParenthesisToken && currentCommandTokens[6] is SemicolonToken ) { var variableName = currentCommandTokens[4].Text; var variable = GetVariableByName(csProgram, variableName); var command = new CommandInstruction(); command.ParentCommand = parentCommand; command.CsProgram = csProgram; command.Tokens = new List <Token>(currentCommandTokens); command.VariableOperand = variable; // add 
bytes of program //var bytesOfCommand = command.MachineCode(); //currentProgramAddr = AddBytesOfCommand(machineCodeProgram, currentProgramAddr, bytesOfCommand); csProgram.Commands.Add(command); } else { IList <string> items = currentCommandTokens.Select(x => x.Text).ToList(); string instruction = items.Aggregate((i, j) => i + " " + j); throw new InvalidInstructionFormatException(instruction); } currentCommandTokens.Clear(); } } return(csProgram); }
/// <summary>
/// Sorts the list in place via the configured sort strategy and returns the
/// maximum element (the last element after the ascending sort).
/// </summary>
/// <typeparam name="T">The element type of the list.</typeparam>
/// <param name="list">The list to search; it is sorted as a side effect.</param>
/// <returns>The largest element of the sorted list.</returns>
public T GetMax<T>(IList<T> list)
{
    // Delegate the ordering to the injected strategy implementation.
    this.sortStrategy.Sort<T>(list);

    T maximum = list.Last();
    return maximum;
}
/// <summary>
/// Regression test: builds a log whose data range exceeds LogMaxDataNodesGet,
/// deliberately performs an update that silently corrupts the server-side
/// chunk collection, then verifies that a subsequent update spanning both the
/// broken and the original chunk — and a final query — do not blow up.
/// </summary>
public void Log131DataAdapter_UpdateInStore_New_Data_Range_Exceeding_LogMaxDataNodesGet()
{
    AddParents();
    DevKit.InitHeader(Log, LogIndexType.datetime);

    // Add 40 more mnemonics
    for (int i = 0; i < 40; i++)
    {
        Log.LogCurveInfo.Add(DevKit.LogGenerator.CreateDoubleLogCurveInfo($"Curve{i}", "m", (short)i));
    }

    // Set column indexes
    for (int i = 0; i < Log.LogCurveInfo.Count; i++)
    {
        Log.LogCurveInfo[i].ColumnIndex = (short?)(i + 1);
    }

    // Generate at least 2x the size of LogMaxDataNodesGet so we can update within a block at n+1.
    DevKit.InitDataMany(Log, DevKit.Mnemonics(Log), DevKit.Units(Log), WitsmlSettings.LogMaxDataNodesGet * 2, 1, false, false);
    for (int i = 0; i < Log.LogData.Count; i++)
    {
        // Pad each generated row with values for the extra curves (all but the first three columns).
        for (int x = 0; x < Log.LogCurveInfo.Count - 3; x++)
        {
            Log.LogData[i] += $",{i}";
        }
    }

    // Partition the data so we don't exceed the max update node size.
    List<string> logData = Log.LogData;
    IList<IEnumerable<string>> chunks = Log.LogData.Partition(WitsmlSettings.LogMaxDataNodesUpdate).ToList();
    Log.LogData = null;
    Log.Uid = DevKit.Uid();
    Log.Name = DevKit.Name($"LogMaxDataNodesGetPlusOne");

    // Add the log.
    var response = DevKit.Add<LogList, Log>(Log);
    Assert.AreEqual((short)ErrorCodes.Success, response.Result);

    // Query the log to make sure it's there.
    Log queryLog = new Log
    {
        Uid = Log.Uid,
        Name = Log.Name,
        UidWell = Log.UidWell,
        UidWellbore = Log.UidWellbore,
        NameWell = Log.NameWell,
        NameWellbore = Log.NameWellbore
    };
    DevKit.GetAndAssert(queryLog);

    // Add the chunks.
    foreach (IEnumerable<string> chunk in chunks)
    {
        Log.LogData = chunk.ToList();
        DevKit.UpdateAndAssert(Log);
    }

    // So now we have to construct an update that will cause a failure - something that lives
    // beyond the 10k node default limit which will cause a new chunk to be created instead of
    // merging the data into the chunk it belongs in.
    string firstIndex = logData[0].Split(new[] { "," }, StringSplitOptions.RemoveEmptyEntries)[0];
    Log.LogData.Clear();

    // First update will be at the very beginning of the range so we force the server to seek from start.
    string[] items = chunks[0].First().Split<string>(",");  // .Split(new[] {","}, StringSplitOptions.None);
    items[0] = firstIndex;
    Log.LogData.Add(string.Join(",", items));

    // Last update will be in the middle of the second 10k chunk of data (we need this update to
    // be beyond the 10k hard default limit to break things).
    items = chunks.Last().Skip(Convert.ToInt32(chunks.Last().Count() / 2)).Take(1).First().Split<string>(",");
    Log.LogData.Add(string.Join(",", items));

    // This will succeed, but will leave the chunk collection silently corrupted.
    DevKit.UpdateAndAssert(Log);
    Log.LogData.Clear();

    // Now we construct an update that will include the newly created bad chunk and the original
    // good chunk: we take rows surrounding roughly the middle of the last chunk and perform an
    // update that covers -2 -> +2.
    items = chunks.Last().Skip(Convert.ToInt32(chunks.Last().Count() / 2) - 2).Take(1).First().Split<string>(",");
    Log.LogData.Add(string.Join(",", items));
    items = chunks.Last().Skip(Convert.ToInt32(chunks.Last().Count() / 2) + 2).Take(1).First().Split<string>(",");
    Log.LogData.Add(string.Join(",", items));

    // This update will fail because we've now tried to update a range with the broken chunk and
    // the original good chunk.
    DevKit.UpdateAndAssert(Log);

    // Query the log to make sure it doesn't blow up.
    DevKit.GetAndAssert(queryLog);
}
/// <summary>
/// Returns the most recently added stack, i.e. the last entry in the backing list.
/// </summary>
/// <returns>The last <see cref="Stack{T}"/> in the internal list.</returns>
private Stack<T> GetLastStack() => _list.Last();
/// <summary>
/// Asserts that the result holds exactly the two expected plants, in order.
/// </summary>
/// <param name="result">The plant names returned by the code under test.</param>
private void AssertPlants(IList<string> result)
{
    // Exactly two entries expected: Plant1 first, Plant2 second.
    Assert.AreEqual(2, result.Count);
    Assert.AreEqual(Plant1, result.First());
    Assert.AreEqual(Plant2, result.Last());
}
/// <summary>
/// Returns the last element of <paramref name="list"/> without removing it,
/// or the default value of <typeparamref name="T"/> when the list is empty.
/// </summary>
/// <typeparam name="T">The element type of the list.</typeparam>
/// <param name="list">The list to peek at.</param>
/// <returns>The last element, or <c>default(T)</c> for an empty list.</returns>
/// <exception cref="ArgumentNullException"><paramref name="list"/> is null.</exception>
public static T Peek<T>(this IList<T> list)
{
    if (list == null)
    {
        // LINQ Any()/Last() also threw ArgumentNullException for a null source;
        // keep that contract with an explicit guard.
        throw new ArgumentNullException(nameof(list));
    }

    // Use Count and the indexer directly: O(1) with no enumerator allocation,
    // unlike the previous Any()/Last() pair which enumerated the list twice.
    return list.Count > 0 ? list[list.Count - 1] : default(T);
}
/// <summary>
/// Builds the ranking chart, rank-point chart and rank-point-delta chart for a
/// player from their time-ordered senka snapshots, plotted against the upper
/// and lower boundary ranking series of the surrounding bracket.
/// </summary>
/// <param name="datas">Player snapshots in chronological order; the last entry is the newest.</param>
public void CreatePlayerCharts(IList<SenkaData> datas)
{
    // Boundary snapshots around the player's current ranking.
    IList<SenkaData> bound = serverManager.GetPlayerBoundList(_server.ID, datas.Last());

    // Ranking -> (date -> snapshot) lookup for the boundary series.
    Dictionary<int, Dictionary<DateTime, SenkaData>> boundDic = bound
        .GroupBy(r => r.Ranking)
        .ToDictionary(
            groupitem => groupitem.Key,
            value => value.ToDictionary(data => data.Date.DateTime, data => data)
        );
    int upper = boundDic.Keys.Min();
    int lower = boundDic.Keys.Max();

    #region Declare
    _rankingChart = new ChartData();
    _rankPointDeltaChart = new ChartData();
    _rankPointChart = new ChartData();
    SenkaData lastData = datas.Last();
    ChartData.JsonData _rankPointData = new ChartData.JsonData(string.Format("{0}({1}位)", lastData.PlayerName, lastData.Ranking));
    ChartData.JsonData _rankingaData = new ChartData.JsonData("順位");
    ChartData.JsonData _deltaAM = new ChartData.JsonData("3~15時");
    ChartData.JsonData _deltaPM = new ChartData.JsonData("15~27時");
    ChartData.JsonData _upperData = new ChartData.JsonData(string.Format("{0}位", upper));
    ChartData.JsonData _lowerData = new ChartData.JsonData(string.Format("{0}位", lower));
    List<string> lables = new List<string>();
    List<string> deltaLables = new List<string>();
    List<int> rankPointValues = new List<int>();
    List<int> rankingValues = new List<int>();
    List<int> deltaAMValues = new List<int>();
    List<int> deltaPMValues = new List<int>();
    List<int> upperValues = new List<int>();
    List<int> lowerValues = new List<int>();
    #endregion

    // Date -> snapshot lookup for the player's own series.
    Dictionary<DateTime, SenkaData> dic = datas.ToDictionary(d => d.Date.DateTime, d => d);
    DateTime end = boundDic[lower].Keys.Max();
    DateTime last = boundDic[lower].Keys.Min();
    DateTime date = last;

    // Samples are iterated in 12-hour steps (see date.AddHours(12) below);
    // pad the AM delta series when the window starts on an 03:00 sample mid-month.
    if (last.Hour == 3 && last.Day != 1)
    {
        deltaAMValues.Add(ChartData.NONE);
    }
    while (date <= end)
    {
        if (dic.ContainsKey(date))
        {
            SenkaData item = dic[date];
            if (upper != lower)
            {
                upperValues.Add(boundDic[upper][date].RankPoint);
            }
            lowerValues.Add(boundDic[lower][date].RankPoint);
            rankPointValues.Add(item.RankPoint);
            rankingValues.Add(item.Ranking);

            // Delta Data
            if (date.Day == 1 && date.Hour == 15)
            {
                // First sample of the month: the raw rank point is used as the delta.
                deltaAMValues.Add(item.RankPoint);
            }
            else if ((date - last).Hours == 12 && dic.ContainsKey(last))
            {
                // NOTE(review): uses TimeSpan.Hours (the hour *component*), so a
                // 36-hour gap also reports 12 — confirm TotalHours was not intended.
                int delta = item.RankPoint - dic[last].RankPoint;
                if (date.Hour == 3)
                {
                    deltaPMValues.Add(delta);
                }
                else
                {
                    deltaAMValues.Add(delta);
                }
            }
            else
            {
                // Gap in the data: no delta can be computed for this half-day slot.
                if (date.Hour == 3)
                {
                    deltaPMValues.Add(ChartData.NONE);
                }
                else
                {
                    deltaAMValues.Add(ChartData.NONE);
                }
            }
            last = date;
        }
        else
        {
            // No player snapshot at this timestamp; still plot boundary data if present.
            if (boundDic[lower].ContainsKey(date))
            {
                if (upper != lower)
                {
                    upperValues.Add(boundDic[upper][date].RankPoint);
                }
                lowerValues.Add(boundDic[lower][date].RankPoint);
            }
            else
            {
                if (upper != lower)
                {
                    upperValues.Add(ChartData.NONE);
                }
                lowerValues.Add(ChartData.NONE);
            }
            rankPointValues.Add(ChartData.NONE);
            rankingValues.Add(ChartData.NONE);
            if (date.Hour == 3)
            {
                deltaPMValues.Add(ChartData.NONE);
            }
            else
            {
                deltaAMValues.Add(ChartData.NONE);
            }
        }

        // Lables: day number on the 03:00 sample, blank on the 15:00 sample.
        if (date.Hour == 3)
        {
            lables.Add(date.Day.ToString());
        }
        else
        {
            deltaLables.Add(date.Day.ToString());
            lables.Add("");
        }
        date = date.AddHours(12);
    }

    // Pad the PM delta series when the window ends on a 15:00 sample.
    if (end.Hour == 15)
    {
        deltaPMValues.Add(ChartData.NONE);
    }

    // Trim a leading pair of empty deltas.
    if (deltaAMValues.Count > 0 && deltaAMValues[0] == ChartData.NONE && deltaPMValues[0] == ChartData.NONE)
    {
        deltaAMValues.RemoveAt(0);
        deltaPMValues.RemoveAt(0);
    }

    // Drop the delta series entirely when they contain no real values at all.
    var amd = deltaAMValues.Distinct();
    var pmd = deltaPMValues.Distinct();
    if (amd.Count() == 1 && pmd.Count() == 1 && amd.First() == ChartData.NONE && pmd.First() == ChartData.NONE)
    {
        deltaAMValues.Clear();
        deltaPMValues.Clear();
    }

    _upperData.SetValue(upperValues);
    _lowerData.SetValue(lowerValues);
    _rankPointData.SetValue(rankPointValues);
    _rankingaData.SetValue(rankingValues);
    _deltaAM.SetValue(deltaAMValues);
    _deltaPM.SetValue(deltaPMValues);

    // Omit the upper boundary series when it produced no points (upper == lower).
    ChartData.JsonData[] _rankPointChartJsonData;
    if (upperValues.Count == 0)
    {
        _rankPointChartJsonData = new ChartData.JsonData[2] { _rankPointData, _lowerData };
    }
    else
    {
        _rankPointChartJsonData = new ChartData.JsonData[3] { _rankPointData, _lowerData, _upperData };
    }

    _rankingChart.SetData(new ChartData.JsonData[1] { _rankingaData }, lables.ToArray());
    _rankPointDeltaChart.SetData(new ChartData.JsonData[2] { _deltaAM, _deltaPM }, deltaLables.ToArray());
    _rankPointChart.SetData(_rankPointChartJsonData, lables.ToArray());
}
/// <summary>
/// Generates and measures the containers for all items of one slot (row),
/// accumulating their length across the scroll direction until the available
/// opposite length is exhausted.
/// </summary>
/// <param name="itemInfos">The items belonging to the slot being generated; the last item determines the slot id.</param>
/// <param name="context">Measure context providing the opposite available length.</param>
/// <returns>The consumed size: largest item length along the scroll axis and the cumulative length across it.</returns>
internal override RadSize GenerateContainer(IList<ItemInfo> itemInfos, MeasureContext context)
{
    int startColumnId = -1;
    int slot = itemInfos.Last().Slot;
    int lastProjectedId = this.layout.GetLastProjectedId(slot);

    // Reuse (or create) the decorator list cached for this slot.
    List<GeneratedItemModel> decorators;
    if (!this.generatedContainers.TryGetValue(slot, out decorators))
    {
        decorators = new List<GeneratedItemModel>();
        this.generatedContainers[slot] = decorators;
    }

    double oppositeAvalilableLength = this.IsHorizontal ? this.AvailableSize.Height : this.AvailableSize.Width;
    double largestLength = 0;
    double cumulativeOppositeScrollLength = 0;
    int lastRealizedId = -1;
    for (int i = 0; i < itemInfos.Count; i++)
    {
        var itemInfo = itemInfos[i];

        // Check if item is realized from previous row.
        if (this.generatedContainerItems.Contains(itemInfo.Item))
        {
            continue;
        }

        if (itemInfo.IsDisplayed || itemInfo.IsSummaryVisible)
        {
            // Stop before generating an item that cannot fit in the remaining length.
            if (cumulativeOppositeScrollLength + this.ItemWidth > context.OppositeAvailableLength)
            {
                break;
            }

            if (startColumnId == -1)
            {
                startColumnId = itemInfo.Id;
            }

            GeneratedItemModel decorator = this.GetPreviouslyVisibleDecorator(itemInfo.Item);

            // Recycle cells on this slot if container is Collasped/Expanded.
            if (decorator != null && (decorator.ItemInfo.IsCollapsed != itemInfo.IsCollapsed))
            {
                this.Generator.RecycleDecorator(decorator);
                decorator = null;
            }

            if (decorator == null)
            {
                decorator = this.GenerateAndPrepareContainer(ref itemInfo);
            }

            decorator.ItemInfo = itemInfo;
            decorators.Add(decorator);
            this.generatedContainerItems.Add(itemInfo.Item);

            var desiredSize = this.Owner.Measure(decorator, this.GetContainerAvailableSize(decorator.ItemInfo));
            decorator.DesiredSize = desiredSize;

            double length;
            double oppositeLength = 0;
            if (this.IsHorizontal)
            {
                var actualItemlength = this.ItemWidth > 0 ? this.ItemWidth : desiredSize.Height;
                // Group items stretch across the full opposite length.
                oppositeLength = decorator.ItemInfo.Item is Telerik.Data.Core.IGroup ? oppositeAvalilableLength : actualItemlength; // TODO replace this with desired size.
                cumulativeOppositeScrollLength += oppositeLength;
                length = desiredSize.Width;
            }
            else
            {
                var actualItemlength = this.ItemWidth > 0 ? this.ItemWidth : desiredSize.Width;
                oppositeLength = decorator.ItemInfo.Item is Telerik.Data.Core.IGroup ? oppositeAvalilableLength : actualItemlength;
                cumulativeOppositeScrollLength += oppositeLength;
                length = desiredSize.Height;
            }

            largestLength = Math.Max(largestLength, length);
            this.layout.UpdateSlotLength(decorator.ItemInfo.Slot, largestLength);
            this.layout.ColumnSlotsRenderInfo.Update(decorator.ItemInfo.Id, oppositeLength);

            // Overflowed by more than one pixel: undo the last container and stop.
            if (cumulativeOppositeScrollLength > context.OppositeAvailableLength && Math.Abs(cumulativeOppositeScrollLength - context.OppositeAvailableLength) > 1)
            {
                this.RecycleLocallyContainer(decorator);
                this.generatedContainerItems.Remove(decorator.ItemInfo.Item);
                break;
            }

            lastRealizedId = decorator.ItemInfo.Id;
        }
    }

    if (lastRealizedId >= 0)
    {
        this.layout.EndSlotMeasure(slot, startColumnId, lastProjectedId, lastRealizedId);
    }

    for (int i = 0; i < itemInfos.Count; i++)
    {
        this.UpdateFrozenContainerInfos(itemInfos[i]);
    }

    // Width/height roles swap depending on orientation.
    if (this.IsHorizontal)
    {
        return(new RadSize(largestLength, cumulativeOppositeScrollLength));
    }
    else
    {
        return(new RadSize(cumulativeOppositeScrollLength, largestLength));
    }
}
/// <summary>
/// Weaves guard checks into the start of a method body so that it returns
/// early (loading a default-initialised local for non-void methods) when the
/// behaviour's game object activity or enabled state does not match the
/// requested conditions.
/// </summary>
/// <param name="body">The method body receiving a local variable for the early-return value, if needed.</param>
/// <param name="methodDefinition">The method whose return type decides the early-return shape.</param>
/// <param name="instructions">The instruction list being rewritten in place.</param>
/// <param name="gameObjectActivity">Which game-object activity check to weave in.</param>
/// <param name="behaviourNeedsToBeEnabled">Whether an enabled-state check is also woven in.</param>
private void InsertInstructions(
    MethodBody body,
    MethodReference methodDefinition,
    IList<Instruction> instructions,
    GameObjectActivity gameObjectActivity,
    bool behaviourNeedsToBeEnabled)
{
    Instruction earlyReturnInstruction;
    if (methodDefinition.ReturnType.FullName != TypeSystem.VoidReference.FullName)
    {
        // Create new variable to return a value
        VariableDefinition variableDefinition = new VariableDefinition(methodDefinition.ReturnType);
        body.Variables.Add(variableDefinition);

        // Set variable to default value
        body.InitLocals = true;

        // Load variable
        Instruction loadInstruction = Instruction.Create(OpCodes.Ldloc, variableDefinition);
        instructions.Add(loadInstruction);

        // Return
        instructions.Add(Instruction.Create(OpCodes.Ret));
        earlyReturnInstruction = loadInstruction;
    }
    else
    {
        // Void method: early returns branch to the existing final ret.
        earlyReturnInstruction = instructions.Last(instruction => instruction.OpCode == OpCodes.Ret);
    }

    // Guard instructions are inserted at the top of the body, in order.
    int index = -1;
    if (gameObjectActivity == GameObjectActivity.InHierarchy && behaviourNeedsToBeEnabled)
    {
        // isActiveAndEnabled covers both conditions with a single getter call.
        // Load this (for isActiveAndEnabled getter call)
        instructions.Insert(++index, Instruction.Create(OpCodes.Ldarg_0));

        // Call isActiveAndEnabled getter
        instructions.Insert(
            ++index,
            Instruction.Create(OpCodes.Callvirt, _getIsActiveAndEnabledMethodReference));

        AddEarlyReturnInstruction(instructions, ref index, earlyReturnInstruction);
    }
    else
    {
        if (gameObjectActivity != GameObjectActivity.None)
        {
            // Load this (for gameObject getter call)
            instructions.Insert(++index, Instruction.Create(OpCodes.Ldarg_0));

            // Call gameObject getter
            instructions.Insert(++index, Instruction.Create(OpCodes.Callvirt, _getGameObjectMethodReference));

            // ReSharper disable once SwitchStatementMissingSomeCases
            switch (gameObjectActivity)
            {
                case GameObjectActivity.Self:
                    // Call activeSelf getter
                    instructions.Insert(
                        ++index,
                        Instruction.Create(OpCodes.Callvirt, _getActiveSelfMethodReference));
                    break;
                case GameObjectActivity.InHierarchy:
                    // Call activeInHierarchy getter
                    instructions.Insert(
                        ++index,
                        Instruction.Create(OpCodes.Callvirt, _getActiveInHierarchyMethodReference));
                    break;
            }

            AddEarlyReturnInstruction(instructions, ref index, earlyReturnInstruction);
        }

        if (behaviourNeedsToBeEnabled)
        {
            // Load this (for enabled getter call)
            instructions.Insert(++index, Instruction.Create(OpCodes.Ldarg_0));

            // Call enabled getter
            instructions.Insert(++index, Instruction.Create(OpCodes.Callvirt, _getEnabledMethodReference));

            AddEarlyReturnInstruction(instructions, ref index, earlyReturnInstruction);
        }
    }

    LogInfo($"Added (an) early return(s) to the method '{methodDefinition.FullName}'.");
}
/// <summary>
/// Draws a rectangle spanning from the first drawing point to the last one,
/// using the colour from the drawing properties.
/// </summary>
/// <param name="pixels">The pixel surface to draw onto.</param>
/// <param name="drawingPoints">Points defining the rectangle; only the first and last are used.</param>
/// <param name="properties">Drawing properties supplying the colour.</param>
/// <returns>The pixel collection produced by the rectangle drawing routine.</returns>
public PixelCollection Draw(PixelCollection pixels, IList<Point> drawingPoints, DrawingItemProperties properties)
{
    var startPoint = drawingPoints.First();
    var endPoint = drawingPoints.Last();
    return Rectangle(pixels, startPoint, endPoint, properties.Color);
}
/// <summary>
/// Runs the AutoML experiment loop: repeatedly infers the next candidate
/// pipeline, trains and evaluates it in a fresh child MLContext, and records
/// run details until the model budget, the experiment time budget, or a
/// cancellation request stops the loop.
/// </summary>
/// <returns>Details for every iteration completed before the experiment stopped.</returns>
public IList<TRunDetail> Execute()
{
    var iterationResults = new List<TRunDetail>();

    // Create a timer for the max duration of experiment. When given time has
    // elapsed, MaxExperimentTimeExpiredEvent is called to interrupt training
    // of current model. Timer is not used if no experiment time is given, or
    // is not a positive number.
    if (_experimentSettings.MaxExperimentTimeInSeconds > 0)
    {
        _maxExperimentTimeTimer = new Timer(
            new TimerCallback(MaxExperimentTimeExpiredEvent), null,
            _experimentSettings.MaxExperimentTimeInSeconds * 1000, Timeout.Infinite
        );
    }
    // If given max duration of experiment is 0, only 1 model will be trained.
    // _experimentSettings.MaxExperimentTimeInSeconds is of type uint, it is
    // either 0 or >0.
    else
    {
        _experimentTimerExpired = true;
    }

    // Add second timer to check for the cancelation signal from the main MLContext
    // to the active child MLContext. This timer will propagate the cancelation
    // signal from the main to the child MLContexs if the main MLContext is
    // canceled.
    _mainContextCanceledTimer = new Timer(new TimerCallback(MainContextCanceledEvent), null, 1000, 1000);

    // Pseudo random number generator to result in deterministic runs with the provided main MLContext's seed and to
    // maintain variability between training iterations.
    int? mainContextSeed = ((ISeededEnvironment)_context.Model.GetEnvironment()).Seed;
    _newContextSeedGenerator = (mainContextSeed.HasValue) ? RandomUtils.Create(mainContextSeed.Value) : null;

    do
    {
        try
        {
            var iterationStopwatch = Stopwatch.StartNew();

            // get next pipeline
            var getPipelineStopwatch = Stopwatch.StartNew();

            // A new MLContext is needed per model run. When max experiment time is reached, each used
            // context is canceled to stop further model training. The cancellation of the main MLContext
            // a user has instantiated is not desirable, thus additional MLContexts are used.
            _currentModelMLContext = _newContextSeedGenerator == null ? new MLContext() : new MLContext(_newContextSeedGenerator.Next());
            _currentModelMLContext.Log += RelayCurrentContextLogsToLogger;

            var pipeline = PipelineSuggester.GetNextInferredPipeline(_currentModelMLContext, _history, _datasetColumnInfo, _task, _optimizingMetricInfo.IsMaximizing, _experimentSettings.CacheBeforeTrainer, _logger, _trainerAllowList);

            // break if no candidates returned, means no valid pipeline available
            if (pipeline == null)
            {
                break;
            }

            // evaluate pipeline
            _logger.Trace($"Evaluating pipeline {pipeline.ToString()}");
            (SuggestedPipelineRunDetail suggestedPipelineRunDetail, TRunDetail runDetail) = _runner.Run(pipeline, _modelDirectory, _history.Count + 1);

            _history.Add(suggestedPipelineRunDetail);
            WriteIterationLog(pipeline, suggestedPipelineRunDetail, iterationStopwatch);

            runDetail.RuntimeInSeconds = iterationStopwatch.Elapsed.TotalSeconds;
            runDetail.PipelineInferenceTimeInSeconds = getPipelineStopwatch.Elapsed.TotalSeconds;

            ReportProgress(runDetail);
            iterationResults.Add(runDetail);

            // if model is perfect, break
            if (_metricsAgent.IsModelPerfect(suggestedPipelineRunDetail.Score))
            {
                break;
            }

            // If after third run, all runs have failed so far, throw exception
            if (_history.Count() == 3 && _history.All(r => !r.RunSucceeded))
            {
                throw new InvalidOperationException($"Training failed with the exception: {_history.Last().Exception}");
            }
        }
        catch (OperationCanceledException e)
        {
            // This exception is thrown when the IHost/MLContext of the trainer is canceled due to
            // reaching maximum experiment time. Simply catch this exception and return finished
            // iteration results.
            _logger.Warning(_operationCancelledMessage, e.Message);
            return(iterationResults);
        }
        catch (AggregateException e)
        {
            // This exception is thrown when the IHost/MLContext of the trainer is canceled due to
            // reaching maximum experiment time. Simply catch this exception and return finished
            // iteration results. For some trainers, like FastTree, because training is done in parallel
            // in can throw multiple OperationCancelledExceptions. This causes them to be returned as an
            // AggregateException and misses the first catch block. This is to handle that case.
            if (e.InnerExceptions.All(exception => exception is OperationCanceledException))
            {
                _logger.Warning(_operationCancelledMessage, e.Message);
                return(iterationResults);
            }
            throw;
        }
    } while (_history.Count < _experimentSettings.MaxModels &&
             !_experimentSettings.CancellationToken.IsCancellationRequested &&
             !_experimentTimerExpired);

    return(iterationResults);
}
/// <summary>
/// Returns the most recently registered web driver instance.
/// </summary>
/// <returns>The last <see cref="IWebDriver"/> in the instance collection.</returns>
public static IWebDriver GetDriverInstance() => _drvInstances.Last();
/// <summary>
/// Infers shapes for all arguments and auxiliary states of this symbol and
/// fills the output lists with matching NDArrays: arrays present in the
/// supplied maps are reused, missing argument/auxiliary arrays are created and
/// initialised with Gaussian noise, missing gradient arrays are created empty,
/// and gradient requirements default to WriteTo (NullOp for inputs whose name
/// ends in "data" or "label").
/// </summary>
/// <param name="context">Device context. NOTE(review): validated but not otherwise used in this visible body — confirm array placement is intentional.</param>
/// <param name="argArrays">Output: one argument array per inferred input shape.</param>
/// <param name="gradArrays">Output: one gradient array per inferred input shape.</param>
/// <param name="gradReqs">Output: one gradient requirement per inferred input shape.</param>
/// <param name="auxArrays">Output: one array per inferred auxiliary state.</param>
/// <param name="argsMap">Known argument arrays by name; also seeds shape inference.</param>
/// <param name="argGradStore">Known gradient arrays by argument name.</param>
/// <param name="gradReqType">Explicit gradient requirements by argument name.</param>
/// <param name="auxMap">Known auxiliary-state arrays by name.</param>
/// <exception cref="ArgumentNullException">Any argument is null.</exception>
public void InferExecutorArrays(Context context, IList<NDArray> argArrays, IList<NDArray> gradArrays, IList<OpReqType> gradReqs, IList<NDArray> auxArrays, IDictionary<string, NDArray> argsMap, IDictionary<string, NDArray> argGradStore, IDictionary<string, OpReqType> gradReqType, IDictionary<string, NDArray> auxMap)
{
    // Validate every argument up front.
    if (context == null)
    {
        throw new ArgumentNullException(nameof(context));
    }
    if (argArrays == null)
    {
        throw new ArgumentNullException(nameof(argArrays));
    }
    if (gradArrays == null)
    {
        throw new ArgumentNullException(nameof(gradArrays));
    }
    if (gradReqs == null)
    {
        throw new ArgumentNullException(nameof(gradReqs));
    }
    if (auxArrays == null)
    {
        throw new ArgumentNullException(nameof(auxArrays));
    }
    if (argsMap == null)
    {
        throw new ArgumentNullException(nameof(argsMap));
    }
    if (argGradStore == null)
    {
        throw new ArgumentNullException(nameof(argGradStore));
    }
    if (gradReqType == null)
    {
        throw new ArgumentNullException(nameof(gradReqType));
    }
    if (auxMap == null)
    {
        throw new ArgumentNullException(nameof(auxMap));
    }

    this.ThrowIfDisposed();

    var argNameList = this.ListArguments();
    var inShapes = new List<List<mx_uint>>();
    var auxShapes = new List<List<mx_uint>>();
    var outShapes = new List<List<mx_uint>>();
    var argShapes = new Dictionary<string, IList<mx_uint>>();

    // Seed shape inference with the shapes of the arrays the caller provided.
    foreach (var argName in argNameList)
    {
        if (argsMap.TryGetValue(argName, out var value))
        {
            argShapes[argName] = value.GetShape();
        }
    }

    this.InferShape(argShapes, inShapes, auxShapes, outShapes);

    for (var i = 0; i < inShapes.Count; ++i)
    {
        var shape = inShapes[i];
        var argName = argNameList[i];

        // Argument array: reuse the supplied one, or create and randomly initialise.
        if (argsMap.TryGetValue(argName, out var value1))
        {
            argArrays.Add(value1);
        }
        else
        {
            argArrays.Add(new NDArray(shape, false));
            NDArray.SampleGaussian(0, 1, argArrays.Last());
        }

        // Gradient array: reuse the supplied store, or create an uninitialised one.
        if (argGradStore.TryGetValue(argName, out var value2))
        {
            gradArrays.Add(value2);
        }
        else
        {
            gradArrays.Add(new NDArray(shape, false));
        }

        // Gradient requirement: explicit value wins; names ending in "data"/"label"
        // get NullOp (inputs need no gradient); everything else defaults to WriteTo.
        if (gradReqType.TryGetValue(argName, out var value3))
        {
            gradReqs.Add(value3);
        }
        else if (argName.LastIndexOf("data", StringComparison.InvariantCulture) == argName.Length - 4 ||
                 argName.LastIndexOf("label", StringComparison.InvariantCulture) == argName.Length - 5)
        {
            gradReqs.Add(OpReqType.NullOp);
        }
        else
        {
            gradReqs.Add(OpReqType.WriteTo);
        }
    }

    var auxNameList = this.ListAuxiliaryStates();
    for (var i = 0; i < auxShapes.Count; ++i)
    {
        var shape = auxShapes[i];
        var auxName = auxNameList[i];

        // Auxiliary state: reuse the supplied one, or create and randomly initialise.
        if (auxMap.TryGetValue(auxName, out var value))
        {
            auxArrays.Add(value);
        }
        else
        {
            auxArrays.Add(new NDArray(shape, false));
            NDArray.SampleGaussian(0, 1, auxArrays.Last());
        }
    }
}
// set partial to true to test if, for example,
// "/a/b" matches the start of "/*/b/*/d"
// Partial means, if you run out of file before you run
// out of pattern, then that's fine, as long as all
// the parts match.
/// <summary>
/// Walks file path segments against pattern segments in lock-step, handling
/// "**" (GlobStar) by recursively trying to swallow zero or more segments.
/// Returns true when the file matches the pattern (or a prefix of it, in
/// partial mode).
/// </summary>
/// <param name="file">The path split into segments.</param>
/// <param name="pattern">The parsed pattern segments.</param>
/// <param name="partial">Allow the file to end before the pattern does.</param>
bool MatchOne(IList<string> file, IList<ParseItem> pattern, bool partial)
{
    //if (options.debug) {
    //  console.error("matchOne",
    //    { "this": this
    //    , file: file
    //    , pattern: pattern })
    //}

    // MatchBase with a one-segment pattern: match against the basename only
    // (the last non-empty segment).
    if (options.MatchBase && pattern.Count == 1)
    {
        file = new[] { file.Last(s => !String.IsNullOrEmpty(s)) };
    }

    //if (options.debug) {
    //  console.error("matchOne", file.length, pattern.length)
    //}

    int fi = 0, pi = 0;
    for (; (fi < file.Count) && (pi < pattern.Count); fi++, pi++)
    {
        //if (options.debug) {
        //  console.error("matchOne loop")
        //}
        ParseItem p = pattern[pi];
        string f = file[fi];

        //if (options.debug) {
        //  console.error(pattern, p, f)
        //}

        // should be impossible.
        // some invalid regexp stuff in the set.
        if (p == null)
        {
            return(false);
        }

        if (p is GlobStar)
        {
            //if (options.debug)
            //  console.error('GLOBSTAR', [pattern, p, f])

            // "**"
            // a/**/b/**/c would match the following:
            // a/b/x/y/z/c
            // a/x/y/z/b/c
            // a/b/x/b/x/c
            // a/b/c
            // To do this, take the rest of the pattern after
            // the **, and see if it would match the file remainder.
            // If so, return success.
            // If not, the ** "swallows" a segment, and try again.
            // This is recursively awful.
            //
            // a/**/b/**/c matching a/b/x/y/z/c
            // - a matches a
            // - doublestar
            //   - matchOne(b/x/y/z/c, b/**/c)
            //     - b matches b
            //     - doublestar
            //       - matchOne(x/y/z/c, c) -> no
            //       - matchOne(y/z/c, c) -> no
            //       - matchOne(z/c, c) -> no
            //       - matchOne(c, c) yes, hit
            int fr = fi, pr = pi + 1;
            if (pr == pattern.Count)
            {
                //if (options.debug)
                //  console.error('** at the end')

                // a ** at the end will just swallow the rest.
                // We have found a match.
                // however, it will not swallow /.x, unless
                // options.dot is set.
                // . and .. are *never* matched by **, for explosively
                // exponential reasons.
                for (; fi < file.Count; fi++)
                {
                    if (file[fi] == "." || file[fi] == ".." || (!options.Dot && !string.IsNullOrEmpty(file[fi]) && file[fi][0] == '.'))
                    {
                        return(false);
                    }
                }
                return(true);
            }

            // ok, let's see if we can swallow whatever we can.
            while (fr < file.Count)
            {
                var swallowee = file[fr];

                //if (options.debug) {
                //  console.error('\nglobstar while',
                //    file, fr, pattern, pr, swallowee)
                //}

                // XXX remove this slice. Just pass the start index.
                if (this.MatchOne(file.Skip(fr).ToList(), pattern.Skip(pr).ToList(), partial))
                {
                    //if (options.debug)
                    //  console.error('globstar found match!', fr, file.Count, swallowee)

                    // found a match.
                    return(true);
                }
                else
                {
                    // can't swallow "." or ".." ever.
                    // can only swallow ".foo" when explicitly asked.
                    if (swallowee == "." || swallowee == ".." || (!options.Dot && swallowee[0] == '.'))
                    {
                        //if (options.debug)
                        //  console.error("dot detected!", file, fr, pattern, pr)
                        break;
                    }

                    // ** swallows a segment, and continue.
                    //if (options.debug)
                    //  console.error('globstar swallow a segment, and continue')
                    fr++;
                }
            }

            // no match was found.
            // However, in partial mode, we can't say this is necessarily over.
            // If there's more *pattern* left, then
            if (partial)
            {
                // ran out of file
                // console.error("\n>>> no match, partial?", file, fr, pattern, pr)
                if (fr == file.Count)
                {
                    return(true);
                }
            }
            return(false);
        }

        // something other than **
        // non-magic patterns just have to match exactly
        // patterns with magic have been turned into regexps.
        if (!p.Match(f, options))
        {
            return(false);
        }
    }

    // Note: ending in / means that we'll get a final ""
    // at the end of the pattern. This can only match a
    // corresponding "" at the end of the file.
    // If the file ends in /, then it can only match a
    // a pattern that ends in /, unless the pattern just
    // doesn't have any more for it. But, a/b/ should *not*
    // match "a/b/*", even though "" matches against the
    // [^/]*? pattern, except in partial mode, where it might
    // simply not be reached yet.
    // However, a/b/ should still satisfy a/*

    // now either we fell off the end of the pattern, or we're done.
    if (fi == file.Count && pi == pattern.Count)
    {
        // ran out of pattern and filename at the same time.
        // an exact hit!
        return(true);
    }
    else if (fi == file.Count)
    {
        // ran out of file, but still had pattern left.
        // this is ok if we're doing the match as part of
        // a glob fs traversal.
        return(partial);
    }
    else if (pi == pattern.Count)
    {
        // ran out of pattern, still have file left.
        // this is only acceptable if we're on the very last
        // empty segment of a file with a trailing slash.
        // a/* should match a/b/
        var emptyFileEnd = (fi == file.Count - 1) && (file[fi] == "");
        return(emptyFileEnd);
    }

    // should be unreachable.
    throw new InvalidOperationException("wtf?");
}
/// <summary>
/// Translates a method call found inside a LINQ-to-CQL predicate into the
/// current binary condition: sets operators, parameters and CQL function
/// names for recognised calls; any unrecognised call is compiled and invoked
/// so its runtime result becomes a parameter value.
/// </summary>
/// <param name="node">The method-call expression being visited.</param>
/// <returns>The original <paramref name="node"/>, unchanged (visitor contract).</returns>
private Expression EvaluateConditionFunction(MethodCallExpression node)
{
    // The condition currently being built by the expression visitor.
    var condition = _conditions.Last();
    switch (node.Method.Name)
    {
        case nameof(Enumerable.Contains):
            EvaluateContainsMethod(node);
            return(node);

        case nameof(Tuple.Create) when node.Method.DeclaringType == typeof(Tuple):
            EvaluateCompositeColumn(node);
            return(node);

        case nameof(string.StartsWith) when node.Method.DeclaringType == typeof(string):
            Visit(node.Object);
            // Evaluate the prefix argument to a concrete string value.
            var startsWithArgument = node.Arguments[0];
            var startString = (string)Expression.Lambda(startsWithArgument).Compile().DynamicInvoke();
            var endString = startString + Utf8MaxValue;

            // Create 2 conditions, ie: WHERE col1 >= startString AND col2 < endString
            var column = condition.Column;
            condition.SetOperator(ExpressionType.GreaterThanOrEqual).SetParameter(startString);
            condition = new BinaryConditionItem();
            condition.SetColumn(column).SetOperator(ExpressionType.LessThan).SetParameter(endString);
            _conditions.Add(condition);
            return(node);

        case nameof(IComparable.CompareTo):
            // Allow comparison to zero
            condition.SetAsCompareTo();
            Visit(node.Object);
            Visit(node.Arguments[0]);
            return(node);

        case nameof(Equals):
            Visit(node.Object);
            condition.SetOperator(ExpressionType.Equal);
            Visit(node.Arguments[0]);
            return(node);

        case nameof(CqlToken.Create) when node.Method.DeclaringType == typeof(CqlToken):
        case nameof(CqlFunction.Token) when node.Method.DeclaringType == typeof(CqlFunction):
            // token(col1, col2, ...) may take several columns and parameters.
            condition.SetFunctionName("token").AllowMultipleColumns().AllowMultipleParameters();
            foreach (var argument in node.Arguments)
            {
                Visit(argument);
            }
            return(node);

        case nameof(CqlFunction.MaxTimeUuid):
        case nameof(CqlFunction.MinTimeUuid):
            condition.SetFunctionName(node.Method.Name.ToLowerInvariant());
            Visit(node.Arguments[0]);
            return(node);
    }

    // Try to invoke to obtain the parameter value
    condition.SetParameter(Expression.Lambda(node).Compile().DynamicInvoke());
    return(node);
}
/// <summary>
/// Merges the given chunks into a single scavenged chunk and attempts to switch it
/// into the database atomically.
/// </summary>
/// <param name="oldChunks">Chunks to merge; must be non-empty, and merging only proceeds for two or more.</param>
/// <param name="ct">Cancellation token observed while traversing chunk records.</param>
/// <returns>
/// True when the merged chunk was switched in; false when merging was aborted
/// (fewer than 2 chunks), the switch was prevented, the operation was cancelled,
/// or an error occurred.
/// </returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="oldChunks"/> is empty.</exception>
private bool MergeChunks(IList<TFChunk.TFChunk> oldChunks, CancellationToken ct)
{
    if (oldChunks.IsEmpty())
    {
        throw new ArgumentException("Provided list of chunks to merge is empty.");
    }

    var oldChunksList = string.Join("\n", oldChunks);

    // A single chunk has nothing to merge with.
    if (oldChunks.Count < 2)
    {
        Log.Trace("SCAVENGING: Tried to merge less than 2 chunks, aborting: {oldChunksList}", oldChunksList);
        return (false);
    }

    var sw = Stopwatch.StartNew();

    // The merged chunk spans from the first chunk's start number to the last chunk's end number.
    int chunkStartNumber = oldChunks.First().ChunkHeader.ChunkStartNumber;
    int chunkEndNumber = oldChunks.Last().ChunkHeader.ChunkEndNumber;

    // Write into a uniquely-named temp file first; it is only switched in on success.
    var tmpChunkPath = Path.Combine(_db.Config.Path, Guid.NewGuid() + ".merge.scavenge.tmp");
    Log.Trace("SCAVENGING: started to merge chunks: {oldChunksList}"
              + "\nResulting temp chunk file: {tmpChunkPath}.",
              oldChunksList, Path.GetFileName(tmpChunkPath));

    TFChunk.TFChunk newChunk;
    try
    {
        newChunk = TFChunk.TFChunk.CreateNew(tmpChunkPath,
                                             _db.Config.ChunkSize,
                                             chunkStartNumber,
                                             chunkEndNumber,
                                             isScavenged: true,
                                             inMem: _db.Config.InMemDb,
                                             unbuffered: _db.Config.Unbuffered,
                                             writethrough: _db.Config.WriteThrough,
                                             initialReaderCount: _db.Config.InitialReaderCount,
                                             reduceFileCachePressure: _db.Config.ReduceFileCachePressure);
    }
    catch (IOException exc)
    {
        // Could not even create the temp chunk — abort the merge, nothing to clean up.
        Log.ErrorException(exc, "IOException during creating new chunk for scavenging merge purposes. Stopping scavenging merge process...");
        return (false);
    }

    try
    {
        // Track whether any source chunk is an older on-disk format version.
        var oldVersion = oldChunks.Any(x => x.ChunkHeader.Version != TFChunk.TFChunk.CurrentChunkVersion);

        var positionMapping = new List<PosMap>();
        foreach (var oldChunk in oldChunks)
        {
            var lastFlushedPage = -1;
            TraverseChunkBasic(oldChunk, ct, result =>
            {
                positionMapping.Add(WriteRecord(newChunk, result.LogRecord));

                // Flush periodically (every FlushPageInterval 4 KiB pages) rather than per record.
                var currentPage = newChunk.RawWriterPosition / 4096;
                if (currentPage - lastFlushedPage > FlushPageInterval)
                {
                    newChunk.Flush();
                    lastFlushedPage = currentPage;
                }
            });
        }

        newChunk.CompleteScavenge(positionMapping);

        if (_unsafeIgnoreHardDeletes)
        {
            Log.Trace("Forcing merged chunk to be kept even if bigger.");
        }

        if (oldVersion)
        {
            Log.Trace("Forcing merged chunk to be kept as old chunk is a previous version.");
        }

        // Attempt the switch; a null result means the manager prevented it.
        var chunk = _db.Manager.SwitchChunk(newChunk, verifyHash: false, removeChunksWithGreaterNumbers: false);
        if (chunk != null)
        {
            Log.Trace(
                "Merging of chunks:"
                + "\n{oldChunksList}"
                + "\ncompleted in {elapsed}."
                + "\nNew chunk: {tmpChunkPath} --> #{chunkStartNumber}-{chunkEndNumber} ({newChunk}).",
                oldChunksList, sw.Elapsed, Path.GetFileName(tmpChunkPath), chunkStartNumber, chunkEndNumber,
                Path.GetFileName(chunk.FileName));
            var spaceSaved = oldChunks.Sum(_ => _.FileSize) - newChunk.FileSize;
            _scavengerLog.ChunksMerged(chunkStartNumber, chunkEndNumber, sw.Elapsed, spaceSaved);
            return (true);
        }
        else
        {
            Log.Trace(
                "Merging of chunks:"
                + "\n{oldChunksList}"
                + "\ncompleted in {elapsed}."
                + "\nBut switching was prevented for new chunk: #{chunkStartNumber}-{chunkEndNumber} ({tmpChunkPath}).",
                oldChunksList, sw.Elapsed, chunkStartNumber, chunkEndNumber, Path.GetFileName(tmpChunkPath));
            _scavengerLog.ChunksNotMerged(chunkStartNumber, chunkEndNumber, sw.Elapsed, "Chunk switch prevented.");
            return (false);
        }
    }
    catch (FileBeingDeletedException exc)
    {
        // A source chunk disappeared mid-merge (likely re-replicated); clean up the temp chunk and skip.
        Log.Info(
            "Got FileBeingDeletedException exception during scavenge merging, that probably means some chunks were re-replicated."
            + "\nMerging of following chunks will be skipped:"
            + "\n{oldChunksList}"
            + "\nStopping merging and removing temp chunk '{tmpChunkPath}'..."
            + "\nException message: {e}.",
            oldChunksList, tmpChunkPath, exc.Message);
        newChunk.Dispose();
        DeleteTempChunk(tmpChunkPath, MaxRetryCount);
        _scavengerLog.ChunksNotMerged(chunkStartNumber, chunkEndNumber, sw.Elapsed, exc.Message);
        return (false);
    }
    catch (OperationCanceledException)
    {
        // Cancellation: mark the partially-written chunk for deletion.
        Log.Info("Scavenging cancelled at:"
                 + "\n{oldChunksList}",
                 oldChunksList);
        newChunk.MarkForDeletion();
        _scavengerLog.ChunksNotMerged(chunkStartNumber, chunkEndNumber, sw.Elapsed, "Scavenge cancelled");
        return (false);
    }
    catch (Exception ex)
    {
        // Any other failure: dispose and remove the temp chunk, then report as not merged.
        Log.Info("Got exception while merging chunk:"
                 + "\n{oldChunks}"
                 + "\nException: {e}",
                 oldChunks, ex.ToString()
        );
        newChunk.Dispose();
        DeleteTempChunk(tmpChunkPath, MaxRetryCount);
        _scavengerLog.ChunksNotMerged(chunkStartNumber, chunkEndNumber, sw.Elapsed, ex.Message);
        return (false);
    }
}
/// <summary>
/// Maps a completed multi-key (swipe-style) point capture onto dictionary entries and
/// publishes the result. Point processing is suspended for the duration of the (CPU-bound)
/// matching and resumed in the finally block even on failure.
/// </summary>
/// <param name="pointsAndKeyValues">The captured, timestamped points with their key values.</param>
/// <param name="startSelectionTriggerSignal">
/// NOTE(review): this parameter is never read — the body uses the
/// startMultiKeySelectionTriggerSignal field instead. Confirm this is intentional.
/// </param>
private async Task ProcessMultiKeySelectionResult(
    IList<Timestamped<PointAndKeyValue>> pointsAndKeyValues,
    TriggerSignal startSelectionTriggerSignal)
{
    Log.DebugFormat("Multi-key selection captured a set of '{0}' PointAndKeyValues.", pointsAndKeyValues.Count);

    RequestSuspend(); //Pause everything (i.e. processing new points) while we perform the (CPU bound) word matching

    try
    {
        if (pointsAndKeyValues.Any())
        {
            // Derive the minimum number of consecutive points that counts as a fixation,
            // from the capture's average point rate and the configured minimum dwell time.
            // NOTE(review): if all points share one timestamp, TotalMilliseconds is 0 and
            // the division below yields Infinity — confirm upstream guarantees a duration.
            var timeSpan = pointsAndKeyValues.Last().Timestamp.Subtract(pointsAndKeyValues.First().Timestamp);

            var sequenceThreshold = (int)Math.Round(
                ((double)pointsAndKeyValues.Count / (double)timeSpan.TotalMilliseconds)
                * Settings.Default.MultiKeySelectionFixationMinDwellTime.TotalMilliseconds);

            Log.DebugFormat(
                "Multi-key selection capture lasted {0}ms. Minimum dwell time is {1}ms, or {2} points.",
                timeSpan.TotalMilliseconds,
                Settings.Default.MultiKeySelectionFixationMinDwellTime.TotalMilliseconds,
                sequenceThreshold);

            //Always assume the start trigger is reliable if it occurs on a letter
            string reliableFirstLetter =
                startMultiKeySelectionTriggerSignal != null
                && startMultiKeySelectionTriggerSignal.Value.PointAndKeyValue != null
                && startMultiKeySelectionTriggerSignal.Value.PointAndKeyValue.StringIsLetter
                    ? startMultiKeySelectionTriggerSignal.Value.PointAndKeyValue.String
                    : null;

            Log.DebugFormat(
                "First letter ('{0}') of multi-key selection capture {1} reliable.",
                reliableFirstLetter,
                reliableFirstLetter != null ? "IS" : "IS NOT");

            //If we are using a fixation trigger and the stop trigger has occurred on a letter then it is reliable - use it
            string reliableLastLetter =
                selectionTriggerSource is IFixationTriggerSource
                && stopMultiKeySelectionTriggerSignal != null
                && stopMultiKeySelectionTriggerSignal.Value.PointAndKeyValue != null
                && stopMultiKeySelectionTriggerSignal.Value.PointAndKeyValue.StringIsLetter
                    ? stopMultiKeySelectionTriggerSignal.Value.PointAndKeyValue.String
                    : null;

            Log.DebugFormat(
                "Last letter ('{0}') of multi-key selection capture {1} reliable.",
                reliableLastLetter,
                reliableLastLetter != null ? "IS" : "IS NOT");

            if (reliableLastLetter != null)
            {
                Log.Debug("Publishing selection event on last letter of multi-key selection capture.");
                PublishSelection(stopMultiKeySelectionTriggerSignal.Value.PointAndKeyValue);
            }

            //Why am I wrapping this call in a Task.Run? Internally the MapCaptureToEntries method uses PLINQ which also blocks the UI thread - this frees it up.
            //This cannot be done inside the MapCaptureToEntries method as the method takes a ref param, which cannot be used inside an anonymous delegate or lambda.
            //The method cannot be made awaitable as async/await also does not support ref params.
            Tuple<List<Point>, KeyValue, List<string>> result = null;
            await Task.Run(() =>
            {
                // ref to a field (not a local/parameter) is legal inside this lambda.
                result = dictionaryService.MapCaptureToEntries(
                    pointsAndKeyValues.ToList(),
                    sequenceThreshold,
                    reliableFirstLetter,
                    reliableLastLetter,
                    ref mapToDictionaryMatchesCancellationTokenSource,
                    exception => PublishError(this, exception));
            });

            if (result != null)
            {
                if (result.Item2 == null
                    && (result.Item3 == null || !result.Item3.Any()))
                {
                    //Nothing useful in the result - play error message. Publish anyway as the points can be rendered in debugging mode.
                    audioService.PlaySound(Settings.Default.ErrorSoundFile, Settings.Default.ErrorSoundVolume);
                }

                PublishSelectionResult(result);
            }
        }
    }
    finally
    {
        // Always resume point processing, even when matching failed or produced nothing.
        RequestResume();
    }
}
/// <summary>
/// Copies every immediate sub-folder of <paramref name="src"/> into <paramref name="dst"/>:
/// for each sub-folder the matching directory is created on the destination and its
/// contents transferred. Sub-folders are processed in parallel.
/// </summary>
/// <param name="src">Source endpoint whose sub-folders are enumerated.</param>
/// <param name="dst">Destination endpoint receiving the folders.</param>
/// <param name="args">CRUD options forwarded to directory creation and content transfer.</param>
/// <exception cref="Exception">
/// Thrown when any parallel copy task fails; the message concatenates all non-empty inner
/// exception messages and the original AggregateException is preserved as the inner exception.
/// </exception>
void RecursiveCopy(IActivityIOOperationsEndPoint src, IActivityIOOperationsEndPoint dst, IDev2CRUDOperationTO args)
{
    try
    {
        var srcContentsFolders = src.ListFoldersInDirectory(src.IOPath);

        // TODO: should not do parallel io if the operations are on the same physical disk? check type of OperationsEndpoint first? delegate via polymorphism to the endpoints?
        Task.WaitAll(srcContentsFolders.Select(sourcePath => Task.Run(() =>
        {
            var sourceEndPoint = ActivityIOFactory.CreateOperationEndPointFromIOPath(sourcePath);

            // The last path segment is the folder name to recreate under the destination.
            IList<string> dirParts = sourceEndPoint.IOPath.Path.Split(
                sourceEndPoint.PathSeperator().ToCharArray(), StringSplitOptions.RemoveEmptyEntries);

            var destinationPath = ActivityIOFactory.CreatePathFromString(
                dst.Combine(dirParts.Last()), dst.IOPath.Username, dst.IOPath.Password,
                true, dst.IOPath.PrivateKeyFile);
            var destinationEndPoint = ActivityIOFactory.CreateOperationEndPointFromIOPath(destinationPath);

            dst.CreateDirectory(destinationPath, args);
            TransferDirectoryContents(sourceEndPoint, destinationEndPoint, args);
        })).ToArray());
    }
    catch (AggregateException e)
    {
        // Build the combined message in linear time (the previous Aggregate over `+` was O(n^2)).
        // Output format preserved: each non-empty inner message followed by "\r\n".
        var message = string.Concat(
            e.InnerExceptions
                .Where(exception => !string.IsNullOrEmpty(exception?.Message))
                .Select(exception => exception.Message + "\r\n"));
        throw new Exception(message, e);
    }
}
/// <summary>
/// Builds the C# expression for a call to a property/indexer/event accessor method.
/// Determines whether the target object must be written out (and possibly cast) so the
/// member access binds unambiguously to the accessor's owner. Void-returning accessors
/// (setters, event add/remove) become an assignment expression; others become a read.
/// </summary>
/// <param name="expectedTargetDetails">Resolution context for the disambiguation check.</param>
/// <param name="method">The accessor method being called (has a non-null AccessorOwner).</param>
/// <param name="target">The translated target expression the accessor is invoked on.</param>
/// <param name="arguments">Indexer arguments; for void accessors the LAST element is the assigned value.</param>
ExpressionWithResolveResult HandleAccessorCall(ExpectedTargetDetails expectedTargetDetails, IMethod method,
    TranslatedExpression target, IList<TranslatedExpression> arguments)
{
    // A target must be spelled out when a local hides the member name, or (for statics)
    // when we are outside the declaring type, or (for instance members) when the target
    // is not `this`.
    bool requireTarget = expressionBuilder.HidesVariableWithName(method.AccessorOwner.Name)
        || (method.IsStatic
            ? !expressionBuilder.IsCurrentOrContainingType(method.DeclaringTypeDefinition)
            : !(target.Expression is ThisReferenceExpression));
    bool targetCasted = false;
    var targetResolveResult = requireTarget ? target.ResolveResult : null;

    // Escalate step by step until the access is unambiguous:
    // 1) add the target, 2) cast the target to the owner's declaring type,
    // 3) give up and use the accessor owner directly.
    IMember foundMember;
    while (!IsUnambiguousAccess(expectedTargetDetails, targetResolveResult, method, out foundMember))
    {
        if (!requireTarget)
        {
            requireTarget = true;
            targetResolveResult = target.ResolveResult;
        }
        else if (!targetCasted)
        {
            targetCasted = true;
            target = target.ConvertTo(method.AccessorOwner.DeclaringType, expressionBuilder);
            targetResolveResult = target.ResolveResult;
        }
        else
        {
            foundMember = method.AccessorOwner;
            break;
        }
    }

    var rr = new MemberResolveResult(target.ResolveResult, foundMember);

    if (method.ReturnType.IsKnownType(KnownTypeCode.Void))
    {
        // Setter / event accessor: the last argument is the value being assigned;
        // any remaining arguments are indexer arguments.
        var value = arguments.Last();
        arguments.Remove(value);
        TranslatedExpression expr;
        if (arguments.Count != 0)
        {
            expr = new IndexerExpression(target.Expression, arguments.Select(a => a.Expression))
                .WithoutILInstruction().WithRR(rr);
        }
        else if (requireTarget)
        {
            expr = new MemberReferenceExpression(target.Expression, method.AccessorOwner.Name)
                .WithoutILInstruction().WithRR(rr);
        }
        else
        {
            expr = new IdentifierExpression(method.AccessorOwner.Name)
                .WithoutILInstruction().WithRR(rr);
        }

        // Event add/remove accessors become += / -= instead of plain assignment.
        var op = AssignmentOperatorType.Assign;
        var parentEvent = method.AccessorOwner as IEvent;
        if (parentEvent != null)
        {
            if (method.Equals(parentEvent.AddAccessor))
            {
                op = AssignmentOperatorType.Add;
            }
            if (method.Equals(parentEvent.RemoveAccessor))
            {
                op = AssignmentOperatorType.Subtract;
            }
        }
        return (new AssignmentExpression(expr, op, value.Expression)
            .WithRR(new TypeResolveResult(method.AccessorOwner.ReturnType)));
    }
    else
    {
        // Getter: produce a plain indexer / member / identifier read.
        if (arguments.Count != 0)
        {
            return (new IndexerExpression(target.Expression, arguments.Select(a => a.Expression))
                .WithoutILInstruction().WithRR(rr));
        }
        else if (requireTarget)
        {
            return (new MemberReferenceExpression(target.Expression, method.AccessorOwner.Name)
                .WithoutILInstruction().WithRR(rr));
        }
        else
        {
            return (new IdentifierExpression(method.AccessorOwner.Name)
                .WithoutILInstruction().WithRR(rr));
        }
    }
}
#pragma warning disable S3776 // Cognitive Complexity of methods should not be too high
/// <summary>
/// Fetches the next path from the input iterator and populates <paramref name="outputs"/>
/// with the selected SharePoint folders and/or files, either as one row per item
/// (starred recordset target) or as a single comma-joined value.
/// </summary>
/// <param name="outputs">Output list appended to in place.</param>
/// <param name="colItr">Iterator collection used to fetch the next path value.</param>
/// <param name="sharepointSource">SharePoint source to query.</param>
/// <param name="inputItr">Iterator yielding the current input path.</param>
private void ExecuteConcreteAction(IList<OutputTO> outputs, WarewolfListIterator colItr, SharepointSource sharepointSource, WarewolfIterator inputItr)
#pragma warning restore S3776 // Cognitive Complexity of methods should not be too high
{
    var path = colItr.FetchNextValue(inputItr);
    if (DataListUtil.IsValueRecordset(Result) && DataListUtil.GetRecordsetIndexType(Result) != enRecordsetIndexType.Numeric)
    {
        if (DataListUtil.GetRecordsetIndexType(Result) == enRecordsetIndexType.Star)
        {
            // Starred recordset target: emit one output row per folder/file.
            var recsetName = DataListUtil.ExtractRecordsetNameFromValue(Result);
            var fieldName = DataListUtil.ExtractFieldNameFromValue(Result);
            if (IsFoldersSelected)
            {
                AddAllFolders(outputs, sharepointSource, path, recsetName, fieldName);
            }
            if (IsFilesSelected)
            {
                AddAllFiles(outputs, sharepointSource, path, recsetName, fieldName);
            }
            if (IsFilesAndFoldersSelected)
            {
                AddAllFilesAndFolders(outputs, sharepointSource, path, recsetName, fieldName);
            }
        }
        else
        {
            AddBlankIndexDebugOutputs(outputs, sharepointSource, path);
        }
    }
    else
    {
        // Scalar (or numeric-index) target: emit one comma-joined value per selection.
        if (IsFoldersSelected)
        {
            AddCommaSeparatedOutput(outputs, GetSharePointFolders(sharepointSource, path));
        }
        if (IsFilesSelected)
        {
            AddCommaSeparatedOutput(outputs, GetSharePointFiles(sharepointSource, path));
        }
        if (IsFilesAndFoldersSelected)
        {
            // Files first, then folders — preserves the original ordering.
            var folderAndPathList = new List<string>();
            folderAndPathList.AddRange(GetSharePointFiles(sharepointSource, path));
            folderAndPathList.AddRange(GetSharePointFolders(sharepointSource, path));
            AddCommaSeparatedOutput(outputs, folderAndPathList);
        }
    }
}

// Appends one output bound to Result whose value is the comma-joined item list
// (replaces the repeated Add + outputs.Last() pattern; the no-op Select(c => c) was dropped).
private void AddCommaSeparatedOutput(IList<OutputTO> outputs, IEnumerable<string> items)
{
    var output = DataListFactory.CreateOutputTO(Result);
    output.OutputStrings.Add(string.Join(",", items));
    outputs.Add(output);
}
/// <summary>
/// Tries to place <paramref name="cuboid"/> into the bin using the given shelf-choice
/// heuristic; when no existing shelf fits, attempts to open a new shelf, trying the
/// allowed orientations of the cuboid. Leaves <c>cuboid.IsPlaced</c> false when it
/// could not be placed at all.
/// </summary>
/// <param name="cuboid">The item to place; its Width/Height/Depth may be rotated.</param>
/// <param name="method">Which shelf-selection heuristic to apply.</param>
/// <exception cref="NotSupportedException">Thrown for an unrecognised heuristic.</exception>
private void Insert(Cuboid cuboid, ShelfChoiceHeuristic method)
{
    // Reject outright when adding this item would exceed the bin's weight limit.
    var packedWeight = _packedCuboids.Sum(x => x.Weight);
    if (cuboid.Weight + packedWeight > _parameter.BinWeight)
    {
        return;
    }

    switch (method)
    {
        case ShelfChoiceHeuristic.ShelfNextFit:
        {
            // Only the most recently opened shelf is considered.
            var currentShelf = _shelves.Last();
            PutOnShelf(currentShelf, cuboid);
            if (cuboid.IsPlaced)
            {
                AddToShelf(currentShelf, cuboid);
                return;
            }
            break;
        }
        case ShelfChoiceHeuristic.ShelfFirstFit:
            // Scan shelves in order and take the first one that accepts the cuboid.
            foreach (var candidateShelf in _shelves)
            {
                PutOnShelf(candidateShelf, cuboid);
                if (!cuboid.IsPlaced)
                {
                    continue;
                }
                AddToShelf(candidateShelf, cuboid);
                return;
            }
            break;
        default:
            throw new NotSupportedException($"shelf choice is unsupported: {method}");
    }

    // No existing shelf fit — try to open a new shelf.
    // Sort the three edges ascending to derive min / middle / max.
    var sortedEdges = new List<decimal> { cuboid.Width, cuboid.Height, cuboid.Depth };
    sortedEdges.Sort();
    var shortest = sortedEdges[0];
    var median = sortedEdges[1];
    var longest = sortedEdges[2];

    // Candidate (width, height, depth) orientations, tried in this order.
    var orientations = new[]
    {
        (w: median, h: longest, d: shortest),
        (w: longest, h: median, d: shortest),
        (w: median, h: shortest, d: longest)
    };

    foreach (var (w, h, d) in orientations)
    {
        // Skip rotated orientations when vertical rotation is not allowed.
        if (!_parameter.AllowRotateVertically && cuboid.Height != h)
        {
            continue;
        }

        cuboid.Width = w;
        cuboid.Height = h;
        cuboid.Depth = d;

        if (!CanStartNewShelf(cuboid.Height))
        {
            continue;
        }

        StartNewShelf(cuboid.Height);
        var newShelf = _shelves.Last();
        PutOnShelf(newShelf, cuboid);
        if (cuboid.IsPlaced)
        {
            AddToShelf(newShelf, cuboid);
            return;
        }
    }

    // The cuboid did not fit in any orientation; caller observes IsPlaced == false.
}
/// <summary>
/// Persists a batch of decoded messages, tracking the number of in-flight persist
/// operations in <c>_inProgress</c>. Optionally prints batch details to the console.
/// </summary>
/// <param name="messages">The batch to persist; may be empty.</param>
public async Task PersistAsync(IList<DecodedMessage> messages)
{
    Interlocked.Increment(ref _inProgress);

    if (ShowMessages.PrintDbPersist)
    {
        // Guard the empty batch: First()/Last() would throw InvalidOperationException.
        // For non-empty batches the printed text is unchanged.
        var counterRange = messages.Count > 0
            ? $"with Counters from {messages[0].Counter} to {messages[messages.Count - 1].Counter}"
            : "(empty batch)";
        Console.WriteLine($" Persisted batch of {messages.Count} {counterRange}. In Progress: {_inProgress} on thread {Thread.CurrentThread.ManagedThreadId}");
    }

    // Fixed 10s delay — presumably stands in for real persistence latency; TODO confirm.
    await Task.Delay(10000);

    Interlocked.Decrement(ref _inProgress);
}