/// <summary>
/// Folds the supplied points into the tracked bounding box (MinX/MaxX/MinY/MaxY)
/// and recomputes the origin point from the resulting delta.
/// </summary>
/// <param name="points">Points whose extremes are merged into the current bounds.</param>
public void SetMinMax(IList<Point> points)
{
    // Extremes of the incoming batch.
    var batchMinX = points.Min(p => p.X);
    var batchMaxX = points.Max(p => p.X);
    var batchMinY = points.Min(p => p.Y);
    var batchMaxY = points.Max(p => p.Y);

    if (_isMaxMinSet)
    {
        // Widen the existing bounds only where the batch exceeds them.
        if (batchMinX < MinX) MinX = batchMinX;
        if (batchMaxX > MaxX) MaxX = batchMaxX;
        if (batchMinY < MinY) MinY = batchMinY;
        if (batchMaxY > MaxY) MaxY = batchMaxY;
    }
    else
    {
        // First batch defines the bounds outright.
        MinX = batchMinX;
        MaxX = batchMaxX;
        MinY = batchMinY;
        MaxY = batchMaxY;
        _isMaxMinSet = true;
    }

    SetOriginPoint(GetDelta());
}
/// <summary>
/// Fill weight and url for each tag.
/// </summary>
/// <param name="tagCounts">The tags to fill.</param>
private void fillTagCounts(IList<TagCount> tagCounts)
{
    int maxTotal = tagCounts.Max(t => t.Total);
    foreach (TagCount tag in tagCounts)
    {
        // Weight 1..5 derived from the tag's share of the most frequent tag.
        double percent = (double)tag.Total / maxTotal * 100;
        tag.Weight =
            percent < 20 ? 1 :
            percent < 40 ? 2 :
            percent < 60 ? 3 :
            percent < 80 ? 4 : 5;
        tag.Url = UrlBuilder.GetTagUrl(tag.UrlSlug);
    }
}
/// <summary>
/// Counting sort using a work array spanning 0..max.
/// Only works with non-negative numbers.
/// </summary>
/// <param name="data">Values to sort; all must be &gt;= 0.</param>
/// <param name="max">The largest value contained in <paramref name="data"/>.</param>
/// <returns>A new ascending-sorted array.</returns>
public static int[] Sort(IList<int> data, int max)
{
    Contract.Requires(data != null && Contract.ForAll(data, i => i >= 0));
    Contract.Requires(max >= 0 && max == data.Max());

    // Histogram of occurrences per value.
    var occurrences = new int[max + 1];
    foreach (int value in data)
    {
        occurrences[value]++;
    }

    // Emit each value as often as it was counted, smallest first.
    var result = new int[data.Count];
    int write = 0;
    for (int value = 0; value <= max; value++)
    {
        int remaining = occurrences[value];
        while (remaining-- > 0)
        {
            result[write++] = value;
        }
    }
    return result;
}
/// <summary>
/// Converts a list of string arrays into a single string in which every
/// column is padded to the width of its widest entry plus <paramref name="padding"/>.
/// Make sure that each array contains the same amount of elements!
/// </summary>
/// <param name="lines">Lines, where each line is an array of column values.</param>
/// <param name="padding">Additional padding between columns (default = 1).</param>
/// <see cref="http://stackoverflow.com/questions/4449021/how-can-i-align-text-in-columns-using-console-writeline"/>
public static string PadElementsInLines(IList<string[]> lines, int padding = 1)
{
    // Column widths: widest cell in each column plus the extra padding.
    var columnCount = lines[0].Length;
    var columnWidths = new int[columnCount];
    for (var col = 0; col < columnCount; col++)
    {
        columnWidths[col] = lines.Max(line => line[col].Length) + padding;
    }

    // Build the output, one padded row per line.
    var builder = new StringBuilder();
    for (var row = 0; row < lines.Count; row++)
    {
        if (row > 0)
        {
            builder.AppendLine();
        }

        var line = lines[row];
        for (var col = 0; col < line.Length; col++)
        {
            builder.Append(line[col].PadRight(columnWidths[col]));
        }
    }
    return builder.ToString();
}
/// <summary>
/// Builds an Atom feed result for the given syndication items.
/// </summary>
/// <param name="items">Items to publish; may be empty.</param>
/// <returns>A <see cref="FeedResult"/> with content type "application/atom+xml".</returns>
private FeedResult FeedResult(IList<SyndicationItem> items)
{
    var settings = Settings.GetSettings<FunnelWebSettings>();
    Debug.Assert(Request.GetOriginalUrl() != null, "Request.GetOriginalUrl() != null");
    var baseUri = Request.GetOriginalUrl();
    var feedUrl = new Uri(baseUri, Url.Action("Recent", "Wiki"));

    // BUG FIX: items.Max(...) throws InvalidOperationException on an empty list,
    // even though the empty case was already handled for LastUpdatedTime.
    // Fall back to DateTime.Now for both values when there are no items.
    var hasItems = items.Count > 0;
    var lastUpdated = hasItems ? items.First().LastUpdatedTime : DateTime.Now;
    var newestLocal = hasItems ? items.Max(i => i.LastUpdatedTime.LocalDateTime) : DateTime.Now;

    return new FeedResult(
        new Atom10FeedFormatter(
            new SyndicationFeed(settings.SiteTitle, settings.SearchDescription, feedUrl, items)
            {
                Id = baseUri.ToString(),
                Links = { new SyndicationLink(baseUri) { RelationshipType = "self" } },
                LastUpdatedTime = lastUpdated
            }),
        newestLocal)
    {
        ContentType = "application/atom+xml"
    };
}
/// <summary>
/// Sets up an in-memory protocol data set, an async mock DbSet over it, and a
/// mocked context whose SaveChangesAsync assigns ids to newly added protocols.
/// </summary>
public void Initialize()
{
    _data = new List<StoredProtocol>
    {
        new StoredProtocol { Id = 1, StudyName = "StudyName" }, // TODO add pseudo data
        new StoredProtocol { Id = 2, StudyName = "StudyName" }
    };

    _mockSet = MockUtility.CreateAsyncMockDbSet(_data, u => u.Id);

    var mockContext = new Mock<IAutoSysContext>();
    mockContext.Setup(s => s.Protocols).Returns(_mockSet.Object);
    mockContext.Setup(s => s.Set<StoredProtocol>()).Returns(_mockSet.Object);
    // BUG FIX: previously Task.Run(...) was passed directly to Returns, so the
    // id-assignment lambda ran exactly once at setup time and the same finished
    // task was handed back on every call. Using a factory defers the work to
    // each SaveChangesAsync invocation.
    mockContext.Setup(s => s.SaveChangesAsync()).Returns(() => Task.Run(() =>
    {
        // Increment protocol ids automatically based on existing ids in mock data
        var max = _data.Max(u => u.Id);
        foreach (var protocol in _data.Where(u => u.Id == 0))
        {
            protocol.Id = ++max;
        }
        return 1; // SaveChangesAsync returns number based on how many times it was called per default
    }));

    _context = mockContext;
    _repository = new ProtocolRepository(_context.Object);
}
/// <summary>
/// Recursively counts the rounds the first player wins playing "deceitful war":
/// when our lowest value cannot beat theirs, sacrifice it against their highest;
/// otherwise beat their lowest and score a win.
/// </summary>
/// <param name="first">Remaining values of the first player.</param>
/// <param name="second">Remaining values of the second player.</param>
/// <returns>Number of rounds won by the first player.</returns>
public int CalculateDeceitfulWar(IList<double> first, IList<double> second)
{
    if (first.Count == 0) return 0;

    double min1 = first.Min();
    double min2 = second.Min();
    bool win = false;
    double play1;
    double play2;
    if (min1 < min2)
    {
        // Our lowest value loses regardless: throw it away against their highest.
        play1 = min1;
        play2 = second.Max();
    }
    else
    {
        // Our lowest value beats their lowest: take the win.
        play1 = min1;
        play2 = min2;
        win = true;
    }

    // BUG FIX: Where(n => n != play) removed *every* occurrence of the played
    // value, which corrupted the recursion when values repeat (and could empty
    // 'second' early, making second.Min() throw). Remove exactly one card.
    var newfirst = new List<double>(first);
    newfirst.Remove(play1);
    var newsecond = new List<double>(second);
    newsecond.Remove(play2);
    return CalculateDeceitfulWar(newfirst, newsecond) + (win ? 1 : 0);
}
/// <summary>
/// Convert numbers to spark chart strings.
/// </summary>
/// <param name="data">List or Array of numbers</param>
/// <returns>empty string if <paramref name="data"/> is <code>null</code> or empty.</returns>
public static String Render(IList<double> data)
{
    var ticks = TickProvider.Ticks;
    if (data == null || data.Count == 0)
        return string.Empty;

    double min = data.Min();
    double max = data.Max();
    double step = (max - min) / (ticks.Length - 1);
    if (step.Equals(0d))
        step = 1; // all values equal: avoid dividing by zero below

    var chars = new char[data.Count];
    for (var i = 0; i < chars.Length; i++)
    {
        double scaled = (data[i] - min) / step;
        // If it's within 10^-10 of its rounded value, round; floor otherwise.
        var rounded = Math.Round(scaled);
        int tick = Math.Abs(rounded - scaled) < 1e-10
            ? (int)rounded
            : (int)Math.Floor(scaled);
        chars[i] = ticks[tick];
    }
    return new string(chars);
}
/// <summary>
/// Builds the match list for the next tournament round by pairing players in
/// descending order of their calculated rating.
/// </summary>
/// <param name="players">All participating players.</param>
/// <param name="previousMatches">Matches already played; used for ratings, colors and the round number.</param>
/// <returns>The matches scheduled for the next round.</returns>
public IEnumerable<MatchModel> GetMatchesForNextRound(IList<PlayerModel> players, IList<MatchModel> previousMatches)
{
    var matchesForNextRound = new List<MatchModel>();
    // Next round is one past the highest round played so far (1 for the first round).
    int round = previousMatches.Count == 0 ? 1 : previousMatches.Max(p => p.Round) + 1;
    List<PlayerModel> playerModels = _playersHelper.GetPlayersWithCalculatedRating(players, previousMatches)
        .OrderByDescending(p => p.Rate)
        .ToList();
    while (playerModels.Count > 0)
    {
        // Take the strongest remaining player and let the helper pick an opponent.
        PlayerModel player = playerModels.First();
        playerModels.Remove(player);
        MatchModel matchModel = _matchesHelper.GetMatchModelForNextRound(player, playerModels, previousMatches);
        matchModel.Round = round;
        matchModel.Player1PlaysWhite = _playersHelper.PlayerCanPlayWithDefinedColor(player, ChessColor.White, previousMatches);
        matchModel.MatchStartTime = DateTime.Now;
        // Placeholder winner id until the game has an actual result.
        matchModel.Winner = new PlayerModel { PlayerId = SpecialPlayerIds.WinnerIdForGameWithUndefinedResult };
        // The chosen opponent is no longer available for pairing in this round.
        playerModels.Remove(matchModel.Player2);
        matchesForNextRound.Add(matchModel);
    }
    return matchesForNextRound;
}
/// <summary>
/// Sets up an in-memory user data set, an async mock DbSet over it, and a
/// mocked context whose SaveChangesAsync assigns ids to newly added users.
/// </summary>
public void Initialize()
{
    _data = new List<StoredUser>
    {
        new StoredUser { Id = 1, Name = "William Parker", MetaData = "Researcher" },
        new StoredUser { Id = 2, Name = "Trudy Jones", MetaData = "Researcher" }
    };

    _mockSet = MockUtility.CreateAsyncMockDbSet(_data, u => u.Id);

    var mockContext = new Mock<IAutoSysContext>();
    mockContext.Setup(s => s.Users).Returns(_mockSet.Object);
    mockContext.Setup(s => s.Set<StoredUser>()).Returns(_mockSet.Object);
    // BUG FIX: previously Task.Run(...) was passed directly to Returns, so the
    // id-assignment lambda ran exactly once at setup time and the same finished
    // task was handed back on every call. Using a factory defers the work to
    // each SaveChangesAsync invocation.
    mockContext.Setup(s => s.SaveChangesAsync()).Returns(() => Task.Run(() =>
    {
        // Increment user ids automatically based on existing ids in mock data
        var max = _data.Max(u => u.Id);
        foreach (var user in _data.Where(u => u.Id == 0))
        {
            user.Id = ++max;
        }
        return 1; // SaveChangesAsync returns number based on how many times it was called per default
    }));

    _context = mockContext;
    _repository = new UserRepository(_context.Object);
}
// Resolves the "normal" eating strategy: spend 'steps' rounds shrinking the
// tallest stacks down to the second-tallest height, then recurse on whatever
// remains via the sibling Resolve.
private static int ResolveNormal(IList<int> pancakes)
{
    int maxvalue = pancakes.Max();
    int minvalue = pancakes.Min();
    // All stacks equal: one pancake eaten per round clears them in 'maxvalue' rounds.
    if (minvalue == maxvalue) return maxvalue;
    int secondmaxvalue = pancakes.Where(n => n != maxvalue).Max();
    IList<int> newpancakes = new List<int>();
    // Rounds needed to bring the tallest stacks down to the second-tallest.
    int steps = maxvalue - secondmaxvalue;
    foreach (int value in pancakes)
    {
        if (value == maxvalue)
            newpancakes.Add(secondmaxvalue);   // tallest stacks end at the second-tallest height
        else if (value > steps)
            newpancakes.Add(value - steps);    // other stacks lose 'steps' pancakes...
        // ...and stacks with value <= steps are emptied entirely and drop out
    }
    // NOTE(review): since every maxvalue stack adds an entry above, this branch
    // looks unreachable for non-empty input — confirm the intended base case.
    if (newpancakes.Count == 0) return 1;
    return Resolve(newpancakes) + steps;
}
// Resolves the "special" strategy: split every tallest stack roughly in half
// (one splitting round per tallest stack), then solve the new configuration.
private static int ResolveSpecial(IList<int> pancakes)
{
    IList<int> newpancakes = new List<int>();
    int maxvalue = pancakes.Max();
    // Nothing to split: a height-1 stack is cleared in a single round.
    if (maxvalue == 1) return 1;
    int split = maxvalue / 2;
    // One splitting round is spent per tallest stack.
    int steps = pancakes.Count(n => n == maxvalue);
    foreach (int value in pancakes)
        if (value == maxvalue)
            newpancakes.Add(value - split); // the part that stays on the original stack
        else
            newpancakes.Add(value);
    for (int k = 0; k < steps; k++)
        newpancakes.Add(split); // each cut-off half becomes its own stack
    // BUG FIX: Resolve was previously called twice with the same argument
    // (once into an unused local), doubling the recursion cost for no effect.
    return steps + Resolve(newpancakes);
}
/// <summary>
/// Generates intervals based on type, numberOfClasses and values.
/// </summary>
/// <param name="values">Sample values. NOTE: sorted in place when natural breaks are requested.</param>
/// <param name="type">Interval strategy (equal intervals or natural breaks).</param>
/// <param name="numberOfClasses">Number of intervals to generate.</param>
/// <returns>The generated intervals.</returns>
public static IList<Interval> GetIntervalsForNumberOfClasses(IList<Single> values, QuantityThemeIntervalType type, int numberOfClasses)
{
    int index = 0;
    var intervals = new List<Interval>();
    var lowValue = values.Min();
    var highValue = values.Max();
    if (type == QuantityThemeIntervalType.NaturalBreaks)
    {
        // NOTE(review): this sorts the caller's list in place — confirm callers tolerate the side effect.
        ArrayList.Adapter((IList)values).Sort(); // performance, works 20% faster than layerAttribute.AttributeValues.Sort();
    }

    for (int i = 0; i < numberOfClasses; i++)
    {
        float intervalMin;
        float intervalMax;
        if (type == QuantityThemeIntervalType.EqualIntervals)
        {
            // Evenly spaced boundaries between the overall min and max.
            intervalMin = lowValue + i * ((highValue - lowValue) / numberOfClasses);
            intervalMax = lowValue + (i + 1) * ((highValue - lowValue) / numberOfClasses);
        }
        else
        {
            // Natural breaks: boundaries are picked from the (now sorted) sample values.
            intervalMin = Convert.ToSingle(values[index]);
            index = (int)Math.Ceiling((double)(i + 1) / numberOfClasses * (values.Count - 1));
            intervalMax = Convert.ToSingle(values[index]);
        }

        var interval = new Interval(intervalMin, intervalMax);
        intervals.Add(interval);
    }
    return intervals;
}
/// <summary>
/// Counting sort over the value range [min, max]. Saves space compared to the
/// 0..max variant: there may still be empty slots between min and max, but the
/// space from 0 to min is no longer wasted. Also works with negative numbers.
/// </summary>
/// <param name="data">Values to sort; must not be null.</param>
/// <param name="min">The smallest value contained in <paramref name="data"/>.</param>
/// <param name="max">The largest value contained in <paramref name="data"/>.</param>
/// <returns>A new ascending-sorted array.</returns>
public static int[] Sort(IList<int> data, int min, int max)
{
    // BUG FIX: these are preconditions, so they must be Contract.Requires;
    // Contract.Ensures declares postconditions and was meaningless here.
    Contract.Requires(data != null, "data cannot be a null pointer");
    Contract.Requires(min == data.Min(), "wrong min submitted");
    Contract.Requires(max == data.Max(), "wrong max submitted");

    int[] count = new int[(max - min) + 1];
    int[] sorted = new int[data.Count];

    // Add the elements in reverse order big --> small
    // NOTE: Could do key - min instead for order small --> big
    foreach (int key in data)
    {
        count[max - key] += 1;
    }

    // Transfer the temp array to the sorted array from small --> big
    int sortedIndex = data.Count - 1;
    for (int i = 0; i < count.Length; i++)
    {
        // How often did we have this number in the input?
        int number = max - i;
        for (int j = 0; j < count[i]; j++)
        {
            sorted[sortedIndex--] = number;
        }
    }
    return sorted;
}
// Prints aggregate statistics (average latency, throughput, thread count and
// error count) for the collected responses to the console.
static void DisplayStats(IList<Response> responses)
{
    if (!responses.Any())
    {
        return;
    }

    // Mean request duration in milliseconds.
    var average = responses.Average(x => (x.EndUtc - x.StartUtc).TotalMilliseconds);
    ClearLine();
    Console.WriteLine("Average time: {0} ms", Convert.ToInt32(average));

    // Wall-clock span covered by all responses (earliest start to latest end).
    var min = responses.Min(x => x.StartUtc);
    var max = responses.Max(x => x.EndUtc);
    var count = responses.Count;
    var timespan = Convert.ToInt32((max - min).TotalMilliseconds);
    // NOTE(review): integer division converts ms to whole seconds, so runs
    // shorter than one second report 0 rps — confirm that is acceptable.
    timespan = timespan == 0 ? 0 : timespan / 1000;
    var rps = timespan == 0 ? 0 : count / timespan;
    ClearLine();
    Console.WriteLine("Performance: {0} rps ({1} reqs in {2})", Convert.ToInt32(rps), responses.Count, timespan);
    ClearLine();
    // Number of distinct worker tasks that produced responses.
    Console.WriteLine("Threads: {0}", responses.Select(x => x.TaskId).Distinct().Count());
    ClearLine();
    Console.WriteLine("Errors: {0}", responses.Count(x => !x.Success));
}
/// <summary>
/// Initializes a new instance of the <see cref="Hull"/> class.
/// </summary>
/// <param name="points">The outline points of the hull, in order.</param>
public Hull(IList<Vector2> points)
{
    // Two vertices per edge (each edge carries its own normal), triangulated as a fan.
    var numVertices = points.Count * 2;
    var numTris = numVertices - 2;
    var numIndicies = numTris * 3;

    this.vertices = new HullVertex[numVertices];
    this.indices = new int[numIndicies];
    this.scale = Vector2.One;

    for (var i = 0; i < points.Count; i++)
    {
        // Edge from p1 to its successor, wrapping back to the first point.
        var p1 = points[i];
        var p2 = points[(i + 1) % points.Count];
        var normal = (p2 - p1).Clockwise();
        normal.Normalize();
        this.vertices[i * 2] = new HullVertex(p1, normal, new Color(0, 0, 0, 0.1f));
        this.vertices[(i * 2) + 1] = new HullVertex(p2, normal, new Color(0, 0, 0, 0.1f));
    }

    // Triangle fan anchored at vertex 0.
    for (var i = 0; i < numTris; i++)
    {
        this.indices[i * 3] = 0;
        this.indices[(i * 3) + 1] = i + 1;
        this.indices[(i * 3) + 2] = i + 2;
    }

    // Squared radius of the bounding circle around the origin.
    this.RadiusSquared = points.Max(x => x.LengthSquared());
}
/// <summary>
/// Creates a repository backed by the given project list and caches the
/// highest existing project id (left untouched when the list is empty).
/// </summary>
/// <param name="projects">Backing collection of projects.</param>
public ProjectRepository(IList<Project> projects)
{
    this.projects = projects;
    if (projects.Count > 0)
    {
        this.maxid = projects.Max(p => p.Id);
    }
}
/// <summary>
/// Draws the point series as a run of filled rectangles between each point and
/// a baseline (fixed baseline, highest value, lowest value, or the lower of the
/// first/last point, depending on configuration).
/// </summary>
/// <param name="points">Points in screen coordinates.</param>
/// <param name="thickness">Requested line thickness (currently unused, see pen comment below).</param>
/// <param name="miterLimit">Miter limit applied to the drawing pen.</param>
/// <param name="logicalToScreenMapper">Maps logical Y values to screen coordinates.</param>
public override void Draw(
    IList<Point> points,
    double thickness,
    int miterLimit,
    ILogicalToScreenMapper logicalToScreenMapper)
{
    var pointsCount = points.Count;
    var firstPoint = points[0];
    var lastPoint = points[pointsCount - 1];
    var highestValue = _useHighest ? points.Max(it => it.Y) : 0;
    var lowestValue = _useLowest ? points.Min(it => it.Y) : 0;
    double baselineValue;

    // Bring everything to the same height
    if (_useBaseline)
        baselineValue = logicalToScreenMapper.MapY(_baselineValue);
    else if (_useHighest)
        baselineValue = highestValue;
    else if (_useLowest)
        baselineValue = lowestValue;
    else
    {
        baselineValue = firstPoint.Y > lastPoint.Y ? lastPoint.Y : firstPoint.Y;
    }

    using (var dc = RenderOpen())
    {
        for (var i = 1; i < points.Count; i++)
        {
            var previousPoint = points[i - 1];
            var currentPoint = points[i];
            var previousY = previousPoint.Y;

            // -1 because: shifting 1 pixel to the left made the gaps between the rectangles disappear;
            // no longer shifting by 1 because that case (small holes in the curves) no longer occurs
            // var previousX = previousPoint.X - 1;
            var previousX = previousPoint.X;

            // Rect cannot handle negative heights, hence these odd-looking expressions
            var height = previousY > baselineValue ? previousY - baselineValue : baselineValue - previousY;
            var y = previousY > baselineValue ? baselineValue : previousY;

            var rectangle = new Rect(
                previousX,
                height == 0 ? y - 1 : y, // shift the line up by 1 so the lines do not appear visually offset downwards
                currentPoint.X - previousX,
                height == 0d ? 1 : height);

            dc.DrawRectangle(
                Brushes.White,
                new Pen(
                    Brushes.White,
                    0) // no longer needed (see above - height == 0): thickness == 1 ? 0.25 : thickness)
                {
                    MiterLimit = miterLimit
                },
                rectangle);
        }
    }
}
/// <summary>
/// Maps each word's frequency to a font size between the configured
/// minimum and maximum font.
/// </summary>
/// <param name="wordsAndFreqs">Word/frequency pairs to size.</param>
/// <param name="options">Carries the font range used for scaling.</param>
/// <returns>New pairs with the computed font size as the number.</returns>
public IList<WordIntPair> GetFontSizes(IList<WordIntPair> wordsAndFreqs, InputOptions options)
{
    int minCount = wordsAndFreqs.Min(pair => pair.Number);
    int maxCount = wordsAndFreqs.Max(pair => pair.Number);

    var sized = new List<WordIntPair>();
    foreach (var pair in wordsAndFreqs)
    {
        var font = CountFont(pair.Number, options.MaxFont, options.MinFont, minCount, maxCount);
        sized.Add(new WordIntPair(pair.Word, font));
    }
    return sized;
}
// The idea is to have a page dictionary in memory that holds SinceId and MaxId for every page
// and use them as page borders so we can tell the twitter API
// from which to which Id to load the tweets for the specific page
private void SetPageTweetBorders(int pageIndex, IList<TweetDto> tweetsResult)
{
    var pages = new Dictionary<int, PageTweetBorders>();
    string pagesKey = $"tweet-pages";

    // Restore the page map persisted between requests, if any.
    if (TempData.ContainsKey(pagesKey))
    {
        pages = JsonConvert.DeserializeObject<Dictionary<int, PageTweetBorders>>(TempData[pagesKey].ToString());
    }

    if (!pages.ContainsKey(pageIndex))
    {
        // BUG FIX: Max/Min throw InvalidOperationException on an *empty*
        // (non-null) result list; only the null case was guarded before.
        // Treat an empty page like a missing one.
        pages.Add(pageIndex, new PageTweetBorders()
        {
            Page = pageIndex,
            MaxId = tweetsResult?.Count > 0 ? tweetsResult?.Max(tweet => tweet.Id) ?? 0 : 0,
            SinceId = tweetsResult?.Count > 0 ? tweetsResult?.Min(tweet => tweet.Id) ?? 0 : 0
        });
    }

    // Persist the updated map back into TempData.
    string pagesJson = JsonConvert.SerializeObject(pages);
    if (TempData.ContainsKey(pagesKey))
    {
        TempData[pagesKey] = pagesJson;
    }
    else
    {
        TempData.Add(pagesKey, pagesJson);
    }
}
/// <summary>
/// Moves the department with the lowest sort order to the end of the list and
/// renumbers all sort orders sequentially starting at 1.
/// </summary>
/// <param name="departments">Departments to rotate; null yields an empty list.</param>
/// <returns>The rotated, renumbered department list.</returns>
public IList<Department> RotatetDepartmentList(IList<Department> departments)
{
    if (departments == null)
    {
        return new List<Department>();
    }
    if (departments.Count == 0)
    {
        return departments;
    }

    // Push the front-most department behind the current last one.
    int lastSortOrder = departments.Max(d => d.SortOrder);
    Department frontDepartment = FindDepartmentWithMinSortOrder(departments);
    if (frontDepartment != null)
    {
        frontDepartment.SortOrder = lastSortOrder + 1;
    }

    // Re-sort and assign a clean 1..n numbering.
    departments = departments.OrderBy(d => d.SortOrder).ToList();
    int sortOrder = 1;
    foreach (Department dep in departments)
    {
        dep.SortOrder = sortOrder;
        sortOrder++;
    }
    return departments;
}
/// <summary>
/// Resizes every image to the largest width/height found in the batch,
/// distributing the added space via offsets.
/// </summary>
/// <param name="imgs">Images to normalize in place.</param>
private void AdjustSize(IList<Img> imgs)
{
    var maxWidth = imgs.Max(x => x.Image.Width);
    var maxHeight = imgs.Max(x => x.Image.Height);
    foreach (var bitmap in imgs)
    {
        // BUG FIX: the width was previously compared against maxHeight, so
        // images narrower than the widest (but tall enough) were never resized.
        if (bitmap.Image.Width < maxWidth || bitmap.Image.Height < maxHeight)
        {
            var dw = maxWidth - bitmap.Image.Width;
            var dh = maxHeight - bitmap.Image.Height;
            var wo = GetOffsets(dw);
            var ho = GetOffsets(dh);
            bitmap.Resize(maxWidth, maxHeight, wo, ho);
        }
    }
}
/// <summary>
/// Adds a new car with the next free id to the inventory.
/// </summary>
/// <param name="parameter">Unused command parameter.</param>
public override void Execute(object parameter)
{
    // BUG FIX: Max() throws InvalidOperationException on an empty (non-null)
    // collection; the previous '?? 0' only covered the null case.
    var maxCount = _inventory?.Count > 0 ? _inventory.Max(x => x.CarId) : 0;
    _inventory?.Add(new Car
    {
        CarId = ++maxCount,
        Color = "Yellow",
        Make = "VW",
        PetName = "Birdie",
        IsChanged = false
    });
}
/// <summary>
/// Click handler that adds a new car with the next free id to the list.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Event args (unused).</param>
private void BtnAddCar_OnClick(object sender, RoutedEventArgs e)
{
    // BUG FIX: Max() throws InvalidOperationException on an empty (non-null)
    // collection; the previous '?? 0' only covered the null case.
    var maxCount = _cars?.Count > 0 ? _cars.Max(x => x.CarId) : 0;
    _cars?.Add(new Inventory
    {
        CarId = ++maxCount,
        Color = "Yellow",
        Make = "VW",
        PetName = "Birdie"
    });
}
/// <summary>
/// Computes the maximum of the input values as an aggregation result.
/// </summary>
/// <param name="input">Values to aggregate; must not be null or empty.</param>
/// <returns>An <see cref="AggregationOperationResult"/> holding the maximum under the key "max".</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="input"/> is null.</exception>
/// <exception cref="InvalidOperationException">Thrown when <paramref name="input"/> is empty.</exception>
public AggregationOperationResult Do(IList<UniversalValue> input)
{
    if (input == null)
        throw new ArgumentNullException("input");
    // CONSISTENCY FIX: like the double-based aggregator, fail fast with a clear
    // message instead of letting Max() throw a bare "Sequence contains no elements".
    // Same exception type as Max() would raise, so existing callers are unaffected.
    if (!input.Any())
        throw new InvalidOperationException("No elements to aggregate");

    AggregationOperationResult result = new AggregationOperationResult();
    result.Add("max", input.Max());
    return result;
}
/// <summary>
/// Aggregates the input by taking its maximum value.
/// </summary>
/// <param name="input">Values to aggregate; must not be null or empty.</param>
/// <returns>An <see cref="AggregationOperationResult"/> of type Max.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="input"/> is null.</exception>
/// <exception cref="InvalidOperationException">Thrown when <paramref name="input"/> is empty.</exception>
public AggregationOperationResult Do(IList<double> input)
{
    if (input == null)
    {
        throw new ArgumentNullException("input");
    }
    if (!input.Any())
    {
        throw new InvalidOperationException("No elements to aggregate");
    }

    var maximum = input.Max();
    return new AggregationOperationResult(AggregationType.Max, maximum);
}
/// <summary>
/// Prepares the column layout for writing actual vs. expected content side by side:
/// the left column fits the widest actual line, the right column starts 10
/// characters later and fits the widest expectation.
/// </summary>
/// <param name="expectations">Expected lines.</param>
/// <param name="actuals">Actual lines.</param>
public ContentExpectationWriter(IList<string> expectations, IList<string> actuals)
{
    _expectations = expectations;
    _actuals = actuals;

    _leftLength = actuals.Max(line => line.Length);
    _startOfRightColumn = _leftLength + 10;
    _rightLength = expectations.Max(line => line.Length);
}
/// <summary>
/// Converts internal integer ids back to their original string form, also
/// raising MaxEntityID to cover the largest id seen.
/// </summary>
/// <param name="internal_id_list">Internal ids to convert.</param>
/// <returns>The ids rendered as strings, in the same order.</returns>
public IList<string> ToOriginalID(IList<int> internal_id_list)
{
    MaxEntityID = Math.Max(MaxEntityID, internal_id_list.Max());
    return internal_id_list.Select(id => id.ToString()).ToArray();
}
/// <summary>
/// Aggregates fuel-entry statistics: min/max consumption and cost, distance,
/// fuel and cost totals. Leaves everything unset when there is no data.
/// </summary>
/// <param name="fuelEntries">Entries to summarize; may be null or empty.</param>
public CarCalculatorSummary(IList<FuelEntryModel> fuelEntries)
{
    HasData = fuelEntries != null && fuelEntries.Any();
    if (!HasData) return;

    MinAmountOf100Km = fuelEntries.Min(i => i.AmountOfFuelPer100Km);
    MaxAmountOf100Km = fuelEntries.Max(i => i.AmountOfFuelPer100Km);
    MinCost1Km = fuelEntries.Min(i => i.CostOf1Km);
    MaxCost1Km = fuelEntries.Max(i => i.CostOf1Km);
    SumDistance = fuelEntries.Sum(i => i.CurrentDistance);
    SumAmountOfFuel = fuelEntries.Sum(i => i.AmountOfFuel);
    SumTotalCost = fuelEntries.Sum(i => i.GasPrice * i.AmountOfFuel);

    // NOTE(review): despite the name, this computes *days per entry* over the
    // covered period, not a fuel amount — confirm the property's intent.
    var minDate = fuelEntries.Min(i => i.Date);
    var maxDate = fuelEntries.Max(i => i.Date);
    AverageFuel = (decimal)((maxDate - minDate).TotalDays / fuelEntries.Count);
}
/// <summary>
/// Returns the right-most edge (Left + Width) over all child items,
/// or NaN when there are no children.
/// </summary>
/// <param name="items">Child items to inspect.</param>
private double GetMaxRightFromChildren(IList<ICanvasItem> items)
{
    if (!items.Any())
    {
        return double.NaN;
    }
    return items.Max(item => item.Left + item.Width);
}
/// <summary>
/// Builds the TypeScript parameter list for a dispatcher: one optional 'any'
/// parameter per argument of the overload with the most arguments.
/// </summary>
/// <param name="methodDeclarations">Overloads to inspect.</param>
/// <param name="argumentPrefix">Prefix used for the generated parameter names.</param>
/// <returns>A comma-separated list such as "arg0?: any, arg1?: any".</returns>
public static string GetDispatcherParameters(IList<MethodDeclaration> methodDeclarations, string argumentPrefix)
{
    var argumentNumber = 0;
    // PERF/CLARITY FIX: the maximum argument count was previously recomputed
    // up to three times across the method; compute it once.
    var maxArgumentCount = methodDeclarations.Count == 0 ? 0 : methodDeclarations.Max(x => x.Arguments.Count);
    if (maxArgumentCount == 0)
    {
        // No overload takes arguments: emit a single optional catch-all parameter.
        return string.Format("{0}{1}?: any", argumentPrefix, argumentNumber);
    }

    return methodDeclarations
        .First(x => x.Arguments.Count == maxArgumentCount)
        .Arguments
        .Select(x => string.Format("{0}{1}?: any", argumentPrefix, argumentNumber++))
        .Aggregate((x, y) => x + ", " + y);
}
/// <summary>
/// Scores matched fragments against the spectrum as the percentage of total
/// weighted intensity that was matched (0 when the spectrum is empty or has
/// no positive weighted intensity).
/// </summary>
/// <param name="spectrumPeaks">Peaks as (m/z, intensity) tuples.</param>
/// <param name="matchedFragments">Matched fragments as (m/z, intensity, bond energy) tuples.</param>
/// <param name="compoundTotalBondEnergy">Total bond energy of the compound, used for weighting.</param>
public double Score(IList<Tuple<double, double>> spectrumPeaks, IEnumerable<Tuple<double, double, double>> matchedFragments, double compoundTotalBondEnergy)
{
    if (!spectrumPeaks.Any())
    {
        return 0;
    }

    var maxSpectrumIntensity = spectrumPeaks.Max(peak => peak.Item2);
    var matchedTotalWeightedIntensity = matchedFragments
        .Sum(fragment => WeightedIntensity(fragment.Item1, fragment.Item2, fragment.Item3, compoundTotalBondEnergy, maxSpectrumIntensity));
    var spectrumTotalWeightedIntensity = spectrumPeaks
        .Sum(peak => WeightedIntensity(peak.Item1, peak.Item2, 0, compoundTotalBondEnergy, maxSpectrumIntensity));

    return spectrumTotalWeightedIntensity > 0
        ? matchedTotalWeightedIntensity / spectrumTotalWeightedIntensity * 100
        : 0;
}
/// <summary>
/// Adds a new book with the next free id, taking author and title from the
/// main window's text boxes (with fallbacks for empty input).
/// </summary>
/// <param name="parameter">Unused command parameter.</param>
public override void Execute(object parameter)
{
    if (_books == null)
    {
        return; // nothing to add to
    }

    // Next id is one past the current maximum (Max over nullable yields null
    // for an empty list, which falls back to 0).
    var nextId = (_books.Max(x => x?.BookId) ?? 0) + 1;
    _books.Add(new Inventory
    {
        BookId = nextId,
        Author = (_mainWindow.tbAuthor.Text == "") ? "Без автора" : _mainWindow.tbAuthor.Text,
        BookName = (_mainWindow.tbBookName.Text == "") ? "Без названия" : _mainWindow.tbBookName.Text,
        ReadStatus = _mainWindow.cbxReadStatus.IsChecked ?? false
    });
}
/// <summary>
/// Captures an x/y series together with its element count and, when the
/// respective series is non-empty, its per-axis minima and maxima.
/// </summary>
/// <param name="x">X values of the series.</param>
/// <param name="y">Y values of the series.</param>
public LineData(IList<double> x, IList<double> y)
{
    m_x = x;
    m_y = y;
    Count = x.Count;

    if (x.Count > 0)
    {
        MinValueX = x.Min();
        MaxValueX = x.Max();
    }
    if (y.Count > 0)
    {
        MinValueY = y.Min();
        MaxValueY = y.Max();
    }
}
// Runs the pricing strategy against all definitions and reports the average
// and maximum relative pricing error versus the reference results.
// The strategy is executed 'repetition' times; only the last run's errors are
// kept — presumably the runs are deterministic and the loop exists for timing;
// TODO confirm.
private static void ExecuteStrategy(IList<DefinitionDto> definitions, IList<ResultDto> results, IStrategy strategy, int repetition, out double relativeErrorAvg, out double maxError)
{
    IList<double> errors = null;
    for (int repetitionIndex = 0; repetitionIndex < repetition; repetitionIndex++)
    {
        errors = definitions.Zip(results, (definition, result) =>
        {
            var computedResult = strategy.Compute(definition);
            // Relative pricing error against the reference result.
            return ((double)(result.Price - computedResult.Item1.Price) / result.Price);
        }).ToList();
    }
    // NOTE(review): with repetition == 0 'errors' stays null and both outputs
    // become 0 — confirm that is the intended contract.
    relativeErrorAvg = (errors?.Sum() ?? 0) / definitions.Count;
    maxError = errors?.Max() ?? 0;
}
/// <summary>
/// Unity physics-step callback driving the evolution loop: starts one trial per
/// population member, then logs fitness statistics and breeds the next
/// generation once all trials for the current generation are done.
/// </summary>
public void FixedUpdate()
{
    Time.timeScale = TimeScale;
    if (!IsTrialRunning)
    {
        if (TrialCount < PopulationSize)
        {
            // Start the next left/right rule pairing.
            int index = TrialCount;
            TrialController.StartTrial(leftRules[index], rightRules[index]);
            IsTrialRunning = true;
        }
        else
        {
            // Append generation stats (avg/max/min fitness of the left side) as a CSV row.
            using (var file = new StreamWriter(FilePath, true))
                file.WriteLine(GenerationCount + "," + leftRules.Average(x => x.Fitness) + "," + leftRules.Max(x => x.Fitness) + "," + leftRules.Min(x => x.Fitness));
            Debug.Log("BREEDING!");
            leftRules = GeneticAlgorithm.BreedNewGeneration(leftRules, MutationChance);
            rightRules = GeneticAlgorithm.BreedNewGeneration(rightRules, MutationChance);
            Debug.Log("Finished Breeding!");
            TrialCount = 0;
            GenerationCount++;
            // Freeze the simulation once the generation limit is reached.
            if (GenerationCount >= GenerationLimit)
            {
                TimeScale = 0;
            }
        }
    }
}
/// <summary>
/// Add or update the building in the datasource.
/// Additionally it will also add building construction types and building predominate uses.
/// </summary>
/// <param name="property">Imported property row describing the building.</param>
/// <param name="pid">Parcel id the building belongs to.</param>
/// <param name="agency">Owning agency; must not be null.</param>
/// <returns>The parcel the building is attached to (created if it did not exist).</returns>
private Entity.Parcel AddUpdateBuilding(Model.ImportPropertyModel property, int pid, Entity.Agency agency)
{
    var name = GenerateName(property.Name, property.Description, property.LocalId);

    // Multiple buildings could be returned for the PID and Name.
    // A missing building yields a default entity (Id == 0), which signals "new" below.
    var b_e = ExceptionHelper.HandleKeyNotFoundWithDefault(() => _pimsAdminService.Building.GetByPid(pid, name).FirstOrDefault(n => n.Name == name) ?? throw new KeyNotFoundException());
    var evaluationDate = new DateTime(property.FiscalYear, 1, 1); // Defaulting to Jan 1st because SIS data doesn't have the actual date.

    // Find parcel
    var parcel = ExceptionHelper.HandleKeyNotFound(() => _pimsAdminService.Parcel.GetByPid(pid));

    // Determine if the last evaluation or fiscal values are older than the one currently being imported.
    var fiscalNetBook = b_e.Fiscals.OrderByDescending(f => f.FiscalYear).FirstOrDefault(f => f.Key == Entity.FiscalKeys.NetBook && f.FiscalYear > property.FiscalYear);
    var evaluationAssessed = b_e.Evaluations.OrderByDescending(e => e.Date).FirstOrDefault(e => e.Key == Entity.EvaluationKeys.Assessed && e.Date > evaluationDate);

    // If the parcel doesn't exist yet we'll need to create a temporary one.
    if (parcel == null)
    {
        parcel = AddUpdateParcel(property, pid, agency);
        _logger.LogWarning($"Parcel '{property.PID}' was generated for a building that had no parcel.");
    }

    // Only want to update the properties with the latest information.
    if (b_e.Id == 0 || fiscalNetBook == null || evaluationAssessed == null)
    {
        // Copy properties over to entity.
        b_e.AgencyId = agency?.Id ?? throw new KeyNotFoundException($"Agency '{property.Agency}' does not exist.");
        b_e.Agency = agency;
        if (!b_e.Parcels.Any(pb => pb.ParcelId == parcel.Id))
        {
            b_e.Parcels.Add(new Entity.ParcelBuilding(parcel, b_e) { Parcel = null, Building = null });
        }
        b_e.Name = name;
        b_e.Description = property.Description.ConvertToUTF8(false);
        var lng = property.Longitude != 0 ? property.Longitude : b_e.Location?.X ?? 0; // This is to stop data from some imports resulting in removing the lat/long.
        var lat = property.Latitude != 0 ? property.Latitude : b_e.Location?.Y ?? 0;
        b_e.Location = new NetTopologySuite.Geometries.Point(lng, lat) { SRID = 4326 };
        b_e.RentableArea = property.BuildingRentableArea;
        b_e.BuildingFloorCount = property.BuildingFloorCount;
        b_e.BuildingTenancy = property.BuildingTenancy.ConvertToUTF8();
        b_e.TransferLeaseOnSale = false;

        // Active properties keep their imported classification; everything else is "Disposed".
        Entity.PropertyClassification propClassification;
        if (String.Compare("Active", property.Status, true) == 0)
        {
            propClassification = _propertyClassifications.FirstOrDefault(pc => String.Compare(pc.Name, property.Classification, true) == 0) ?? throw new KeyNotFoundException($"Property Classification '{property.Classification}' does not exist.");
        }
        else
        {
            propClassification = _propertyClassifications.FirstOrDefault(pc => pc.Name == "Disposed") ?? throw new KeyNotFoundException($"Property Classification '{property.Status}' does not exist.");
        }
        b_e.ClassificationId = propClassification.Id;
        b_e.Classification = propClassification;

        // Find foreign key.
        var build_type = _buildingConstructionTypes.FirstOrDefault(bct => String.Compare(bct.Name, property.BuildingConstructionType, true) == 0);
        var build_use = _buildingPredominateUses.FirstOrDefault(bpu => String.Compare(bpu.Name, property.BuildingPredominateUse, true) == 0);

        // If the building construction type doesn't exist, create it.
        if (build_type == null)
        {
            var max_id = _buildingConstructionTypes.Max(pc => pc.Id) + 1;
            build_type = new Entity.BuildingConstructionType(max_id, property.BuildingConstructionType);
            _pimsAdminService.BuildingConstructionType.Add(build_type);
            _buildingConstructionTypes.Add(build_type);
        }

        // If the building predominate use doesn't exist, create it.
        if (build_use == null)
        {
            var max_id = _buildingPredominateUses.Max(pc => pc.Id) + 1;
            build_use = new Entity.BuildingPredominateUse(max_id, property.BuildingPredominateUse);
            _pimsAdminService.BuildingPredominateUse.Add(build_use);
            _buildingPredominateUses.Add(build_use);
        }
        b_e.BuildingConstructionTypeId = build_type.Id;
        b_e.BuildingConstructionType = build_type;
        b_e.BuildingPredominateUseId = build_use.Id;
        b_e.BuildingPredominateUse = build_use;

        // TODO: Handle this issue more gracefully.
        var city = _pimsAdminService.AdministrativeArea.Get(property.City.ConvertToUTF8()) ?? throw new InvalidOperationException($"Administrative area '{property.City}' does not exist in the datasource.");

        // Add/Update the address.
        if (b_e.AddressId == 0)
        {
            _logger.LogDebug($"Adding address for building '{property.PID}'-''{property.LocalId}'.");
            var address = new Entity.Address(property.CivicAddress.ConvertToUTF8(), null, city.Name, "BC", property.Postal.ConvertToUTF8());
            b_e.Address = address;
        }
        else
        {
            b_e.Address.Address1 = property.CivicAddress.ConvertToUTF8();
            b_e.Address.AdministrativeArea = city.Name;
            b_e.Address.Postal = property.Postal.ConvertToUTF8();
        }
    }

    // Add new fiscal values for each year.
    if (!b_e.Fiscals.Any(e => e.FiscalYear == property.FiscalYear))
    {
        b_e.Fiscals.Add(new Entity.BuildingFiscal(b_e, property.FiscalYear, Entity.FiscalKeys.NetBook, property.NetBook));
    }

    // Add a new evaluation if new.
    if (!b_e.Evaluations.Any(e => e.Date == evaluationDate))
    {
        b_e.Evaluations.Add(new Entity.BuildingEvaluation(b_e, evaluationDate, Entity.EvaluationKeys.Assessed, property.Assessed));
    }

    // A new building.
    if (b_e.Id == 0)
    {
        _pimsAdminService.Building.Add(b_e);
        _logger.LogDebug($"Adding building '{property.LocalId}' to parcel '{property.PID}'");
    }
    else
    {
        _pimsAdminService.Building.Update(b_e);
        _logger.LogDebug($"Updating building '{property.LocalId}' to parcel '{property.PID}'");
    }
    return (parcel);
}
/// <summary>
/// Loads trade-type profit data for the team over the last 25 trading days
/// ending at <paramref name="endDate"/>.
/// </summary>
/// <param name="endDate">End of the reporting period.</param>
/// <param name="teamId">Team to report on.</param>
/// <returns>The calculated trade-type profit entries.</returns>
private List<TradeTypeProfitEntity> GetReportData(DateTime endDate, int teamId)
{
    // Get the 25 trading-day dates before the end date, ascending.
    _queryDates = CommonHelper.GetWorkdaysBeforeCurrentDay(endDate).OrderBy(x => x).ToList();
    // FIX: removed the redundant empty-list allocation that was immediately overwritten.
    return this._statisticsReportService
        .CalculateTradeTypeProfit(teamId, _queryDates.Min(), _queryDates.Max())
        .ToList();
}
/// <summary>
/// Streams the channel data.
/// Continuously fetches data ranges for all contexts in the list, streams them
/// to the consumer, and removes contexts once they have returned all their data.
/// </summary>
/// <param name="contextList">The context list; all contexts share the same streaming type and parent URI.</param>
/// <param name="token">The cancellation token that stops the streaming loop.</param>
/// <returns></returns>
protected virtual async Task StreamChannelData(IList<ChannelStreamingContext> contextList, CancellationToken token)
{
    _channelStreamingContextLists.Add(contextList);

    // These values can be set outside of our processing loop as they won't change
    //... as context is processed and completed.
    var firstContext = contextList.First();
    var channelStreamingType = firstContext.ChannelStreamingType;
    var parentUri = firstContext.ParentUri;
    var indexes = firstContext.ChannelMetadata.Indexes.Cast<IIndexMetadataRecord>().ToList();
    var primaryIndex = indexes[0];
    var isTimeIndex = indexes.Select(i => i.IndexKind == (int)ChannelIndexTypes.Time).ToArray();
    // IndexCount streams the requested number of latest values; LatestValue streams 1.
    var requestLatestValues =
        channelStreamingType == ChannelStreamingTypes.IndexCount
            ? firstContext.IndexCount
            : channelStreamingType == ChannelStreamingTypes.LatestValue ? 1 : (int?)null;
    var increasing = primaryIndex.Direction == (int)IndexDirections.Increasing;
    bool? firstStart = null;

    // Loop until there is a cancellation or all channels have been removed
    while (!IsStreamingStopped(contextList, ref token))
    {
        // True only on the first pass; passed along so the downstream streamer knows.
        firstStart = !firstStart.HasValue;

        var channelIds = contextList.Select(i => i.ChannelId).Distinct().ToArray();
        Logger.Debug($"Streaming data for parentUri {parentUri.Uri} and channelIds {string.Join(",", channelIds)}");

        // We only need a start index value for IndexValue and RangeRequest or if we're streaming
        //... IndexCount or LatestValue and requestLatestValues is no longer set.
        var minStart =
            (channelStreamingType == ChannelStreamingTypes.IndexValue || channelStreamingType == ChannelStreamingTypes.RangeRequest) ||
            ((channelStreamingType == ChannelStreamingTypes.IndexCount || channelStreamingType == ChannelStreamingTypes.LatestValue) && !requestLatestValues.HasValue)
                ? contextList.Min(x => Convert.ToInt64(x.StartIndex))
                : (long?)null;

        // Only need an end index value for range request
        var maxEnd = channelStreamingType == ChannelStreamingTypes.RangeRequest
            ? contextList.Max(x => Convert.ToInt64(x.EndIndex))
            : (long?)null;

        //var isTimeIndex = primaryIndex.IndexType == ChannelIndexTypes.Time;
        var rangeSize = WitsmlSettings.GetRangeSize(isTimeIndex[0]);

        // Convert indexes from scaled values
        var minStartIndex = minStart?.IndexFromScale(primaryIndex.Scale, isTimeIndex[0]);
        // IndexValue queries read one rangeSize chunk forward (or backward for decreasing indexes).
        var maxEndIndex = channelStreamingType == ChannelStreamingTypes.IndexValue
            ? (increasing ? minStartIndex + rangeSize : minStartIndex - rangeSize)
            : maxEnd?.IndexFromScale(primaryIndex.Scale, isTimeIndex[0]);

        // Get channel data
        var mnemonics = contextList.Select(c => c.ChannelMetadata.ChannelName).ToList();
        var dataProvider = GetDataProvider(parentUri);
        var optimiseStart = channelStreamingType == ChannelStreamingTypes.IndexValue;
        var channelData = dataProvider.GetChannelData(parentUri, new Range<double?>(minStartIndex, maxEndIndex), mnemonics, requestLatestValues, optimiseStart);

        // Stream the channel data
        await StreamChannelData(contextList, channelData, mnemonics.ToArray(), increasing, isTimeIndex, primaryIndex.Scale, firstStart.Value, token);

        // If we have processed an IndexCount or LatestValue query clear requestLatestValues so we can
        //... keep streaming new data as long as the channel is active.
        if (channelStreamingType == ChannelStreamingTypes.IndexCount || channelStreamingType == ChannelStreamingTypes.LatestValue)
        {
            requestLatestValues = null;
        }

        // Check each context to see if all the data has streamed.
        var completedContexts = contextList
            .Where(
                c => (c.ChannelStreamingType != ChannelStreamingTypes.RangeRequest &&
                      c.ChannelMetadata.Status != (int)ChannelStatuses.Active && c.ChannelMetadata.EndIndex.HasValue &&
                      c.LastIndex >= c.ChannelMetadata.EndIndex.Value) ||
                     (c.ChannelStreamingType == ChannelStreamingTypes.RangeRequest &&
                      c.LastIndex >= c.EndIndex))
            .ToArray();

        // Remove any contexts from the list that have completed returning all data
        completedContexts.ForEach(c =>
        {
            // Notify consumer if the ReceiveChangeNotification field is true
            if (c.ChannelMetadata.Status != (int)ChannelStatuses.Active && c.ReceiveChangeNotification)
            {
                // TODO: Decide which message should be sent...
                // ChannelStatusChange(c.ChannelId, c.ChannelMetadata.Status);
                // ChannelRemove(c.ChannelId);
            }
            contextList.Remove(c);
        });

        // Delay to prevent CPU overhead
        await Task.Delay(WitsmlSettings.StreamChannelDataDelayMilliseconds, token);
    }
}
/// <summary>
/// Get the total package dimensions for a set of shipped items.
/// Uses the cube-root-of-volume method when enabled and multiple items are shipped;
/// otherwise sums per-item dimensions (inaccurate for many items).
/// </summary>
/// <param name="packageItems">Package items</param>
/// <param name="width">Width</param>
/// <param name="length">Length</param>
/// <param name="height">Height</param>
/// <param name="ignoreFreeShippedItems">Whether to ignore the dimensions of products marked as "Free shipping"</param>
public virtual void GetDimensions(IList<GetShippingOptionRequest.PackageItem> packageItems, out decimal width, out decimal length, out decimal height, bool ignoreFreeShippedItems = false)
{
    if (packageItems == null)
    {
        throw new ArgumentNullException(nameof(packageItems));
    }

    //calculate cube root of volume, in case if the number of items more than 1
    if (_shippingSettings.UseCubeRootMethod && AreMultipleItems(packageItems))
    {
        //find max dimensions of the shipped items
        // (free-shipping items contribute zero only when ignoreFreeShippedItems is set)
        var maxWidth = packageItems.Max(item => !item.Product.IsFreeShipping || !ignoreFreeShippedItems ? item.Product.Width : decimal.Zero);
        var maxLength = packageItems.Max(item => !item.Product.IsFreeShipping || !ignoreFreeShippedItems ? item.Product.Length : decimal.Zero);
        var maxHeight = packageItems.Max(item => !item.Product.IsFreeShipping || !ignoreFreeShippedItems ? item.Product.Height : decimal.Zero);

        //get total volume of the shipped items
        var totalVolume = packageItems.Sum(packageItem =>
        {
            //product volume
            var productVolume = !packageItem.Product.IsFreeShipping || !ignoreFreeShippedItems ?
                packageItem.Product.Width * packageItem.Product.Length * packageItem.Product.Height : decimal.Zero;

            //associated products volume
            if (_shippingSettings.ConsiderAssociatedProductsDimensions && !string.IsNullOrEmpty(packageItem.ShoppingCartItem.AttributesXml))
            {
                productVolume += _productAttributeParser.ParseProductAttributeValues(packageItem.ShoppingCartItem.AttributesXml)
                    .Where(attributeValue => attributeValue.AttributeValueType == AttributeValueType.AssociatedToProduct).Sum(attributeValue =>
                    {
                        var associatedProduct = _productService.GetProductById(attributeValue.AssociatedProductId);
                        if (associatedProduct == null || !associatedProduct.IsShipEnabled || (associatedProduct.IsFreeShipping && ignoreFreeShippedItems))
                        {
                            return(0);
                        }

                        //adjust max dimensions
                        // NOTE: this lambda mutates the captured maxWidth/maxLength/maxHeight
                        // locals as a side effect of computing the volume sum.
                        maxWidth = Math.Max(maxWidth, associatedProduct.Width);
                        maxLength = Math.Max(maxLength, associatedProduct.Length);
                        maxHeight = Math.Max(maxHeight, associatedProduct.Height);

                        return(attributeValue.Quantity * associatedProduct.Width * associatedProduct.Length * associatedProduct.Height);
                    });
            }

            //total volume of item
            return(productVolume * packageItem.GetQuantity());
        });

        //set dimensions as cube root of volume
        width = length = height = Convert.ToDecimal(Math.Pow(Convert.ToDouble(totalVolume), 1.0 / 3.0));

        //sometimes we have products with sizes like 1x1x20
        //that's why let's ensure that a maximum dimension is always preserved
        //otherwise, shipping rate computation methods can return low rates
        width = Math.Max(width, maxWidth);
        length = Math.Max(length, maxLength);
        height = Math.Max(height, maxHeight);
    }
    else
    {
        //summarize all values (very inaccurate with multiple items)
        width = length = height = decimal.Zero;
        foreach (var packageItem in packageItems)
        {
            var productWidth = decimal.Zero;
            var productLength = decimal.Zero;
            var productHeight = decimal.Zero;
            if (!packageItem.Product.IsFreeShipping || !ignoreFreeShippedItems)
            {
                productWidth = packageItem.Product.Width;
                productLength = packageItem.Product.Length;
                productHeight = packageItem.Product.Height;
            }

            //associated products
            GetAssociatedProductDimensions(packageItem.ShoppingCartItem, out var associatedProductsWidth, out var associatedProductsLength, out var associatedProductsHeight);

            var quantity = packageItem.GetQuantity();
            width += (productWidth + associatedProductsWidth) * quantity;
            length += (productLength + associatedProductsLength) * quantity;
            height += (productHeight + associatedProductsHeight) * quantity;
        }
    }
}
/// <summary>
/// Compares the expected lines against the current REPL window text and optionally
/// prints a side-by-side Expected/Actual table to the console.
/// </summary>
/// <param name="expected">Expected lines; embedded '\n' characters are re-split into separate lines.</param>
/// <param name="matchAtStart">Compare from the top of the buffer.</param>
/// <param name="matchAtEnd">Compare from the bottom of the buffer (lines walked in reverse).</param>
/// <param name="showOutput">Write the comparison table to the console.</param>
/// <returns>True when all compared lines match.</returns>
private bool MatchTextInternal(IList<string> expected, bool matchAtStart, bool matchAtEnd, bool showOutput)
{
    // Resplit lines to handle cases where linebreaks are embedded in
    // a single string. This helps ensure the comparison is correct and
    // the output is sensible.
    expected = expected.SelectMany(l => l.Split('\n')).Select(l => l.TrimEnd('\r', '\n', ' ')).ToList();

    var snapshot = Window.TextView.TextBuffer.CurrentSnapshot;
    var lines = snapshot.Lines;
    // Cap the number of lines we'll ever look at to avoid breaking here
    // when tests get stuck in infinite loops
    if (matchAtStart && !matchAtEnd)
    {
        lines = lines.Take(expected.Count + 1);
    }
    else if (!matchAtStart && matchAtEnd)
    {
        lines = lines.Skip(snapshot.LineCount - expected.Count - 2);
    }
    var actual = lines
        .SelectMany(l => l.GetText().Split('\n'))
        .Select(l => l.TrimEnd('\r', '\n', ' '))
        .ToList();

    var primary = CurrentPrimaryPrompt;
    if (IsIPythonPrompt(primary))
    {
        // IPython prompts include an incrementing index, which we must remove for comparison
        actual = actual.Select(l => RemoveIndexFromIPythonPrompt(l)).ToList();
    }

    bool isMatch = true;
    // Column width for the "Expected" column: widest expected line, at least "Expected" itself.
    var leftWidth = Math.Max("Expected".Length, expected.Max(s => s.Length));
    // Builds e.g. "{0}{1}{2}{3,-NN} {4}" — flags, padded expected, actual.
    var format = string.Format("{{0}}{{1}}{{2}}{{3,-{0}}} {{4}}", leftWidth);
    if (showOutput)
    {
        Console.WriteLine(format, " ", " ", " ", "Expected", "Actual");
        if (matchAtEnd && !matchAtStart)
        {
            Console.WriteLine("(Lines in reverse order)");
        }
    }

    if (matchAtStart)
    {
        for (int i = 0; ; ++i)
        {
            if (i >= expected.Count && i >= actual.Count)
            {
                break;
            }
            else if ((i >= expected.Count || i >= actual.Count) && matchAtEnd)
            {
                // Length mismatch only fails the comparison when both ends must match.
                isMatch = false;
            }
            bool lineMatch = false;
            if (i < expected.Count && i < actual.Count)
            {
                lineMatch = expected[i] == actual[i];
                isMatch &= lineMatch;
            }
            if (showOutput)
            {
                // '-' marks a side that has run out of lines; '*' marks a mismatch.
                Console.WriteLine(
                    format,
                    i < expected.Count ? " " : "-",
                    lineMatch ? " " : "*",
                    i < actual.Count ? " " : "-",
                    i < expected.Count ? expected[i] : "",
                    i < actual.Count ? actual[i] : ""
                );
            }
        }
    }
    else if (matchAtEnd)
    {
        // Walk both lists backwards from the end using a negative offset.
        for (int i = -1; ; --i)
        {
            int e_i = expected.Count + i;
            int a_i = actual.Count + i;
            if (e_i < 0 && a_i < 0)
            {
                break;
            }
            bool lineMatch = false;
            if (e_i > 0 && a_i > 0)
            {
                lineMatch = expected[e_i] == actual[a_i];
                isMatch &= lineMatch;
            }
            else if (e_i == 0 && a_i >= 0)
            {
                // The first expected line only has to match the tail of the actual line
                // (it may begin mid-line in the buffer).
                // NOTE(review): e_i > 0 (not >= 0) above looks deliberate for that reason — confirm.
                lineMatch = actual[a_i].EndsWith(expected[e_i]);
                isMatch &= lineMatch;
            }
            if (showOutput)
            {
                Console.WriteLine(
                    format,
                    e_i >= 0 ? " " : "-",
                    lineMatch ? " " : "*",
                    a_i >= 0 ? " " : "-",
                    e_i >= 0 ? expected[e_i] : "",
                    a_i >= 0 ? actual[a_i] : ""
                );
            }
        }
    }
    else
    {
        throw new NotImplementedException();
    }
    return(isMatch);
}
/// <summary>
/// Rebuilds the statistic-based chart series items from the given tournaments,
/// filtered by the current tournament chart filter and display range.
/// </summary>
/// <param name="chartSeriesItems">Accumulator: series items per chart series; entries are appended in place.</param>
/// <param name="tournaments">Source tournaments; nothing happens when null or empty.</param>
private void UpdateBasedOnStatisticSeries(Dictionary<TournamentChartSeries, List<ChartSeriesItem>> chartSeriesItems, IList<Tournaments> tournaments)
{
    if (tournaments == null || tournaments.Count == 0)
    {
        return;
    }

    var chartItemDataBuilder = CreateChartItemDataBuilder(ChartDisplayRange);
    var tournamentChartItemDataBuilder = CreateTournamentChartItemDataBuilder(ChartDisplayRange);

    // Earliest date to include, derived from the most recent tournament timestamp.
    var firstDate = tournamentChartItemDataBuilder.GetFirstDate(tournaments.Max(x => x.Firsthandtimestamp));

    // Keep tournaments inside the window that match the MTT/STT/All filter,
    // de-duplicated by tournament key (first occurrence wins).
    var groupedTournaments = tournaments
        .Where(x => x.Firsthandtimestamp >= firstDate &&
            (tournamentChartFilterType == TournamentChartFilterType.All ||
             tournamentChartFilterType == TournamentChartFilterType.MTT && x.Tourneytagscsv == TournamentsTags.MTT.ToString() ||
             tournamentChartFilterType == TournamentChartFilterType.STT && x.Tourneytagscsv == TournamentsTags.STT.ToString()))
        .GroupBy(x => x.BuildKey()).ToDictionary(x => x.Key, x => x.FirstOrDefault());

    // Player statistics restricted to the selected tournaments.
    var filteredTournamentPlayerStatistic = StorageModel
        .GetFilteredTournamentPlayerStatistic()
        .Where(x => groupedTournaments.ContainsKey(new TournamentKey(x.PokersiteId, x.TournamentId)))
        .ToArray();

    // filter and orders
    var stats = chartItemDataBuilder.PrepareStatistic(filteredTournamentPlayerStatistic);

    object previousGroupKey = null;
    var itemsCounter = 0;

    for (var statIndex = 0; statIndex < stats.Length; statIndex++)
    {
        var stat = stats[statIndex];

        // A new group key starts a new chart point; otherwise the last point is updated in place.
        var currentGroupKey = chartItemDataBuilder.BuildGroupKey(stat, statIndex);
        var isNew = !currentGroupKey.Equals(previousGroupKey);
        if (isNew)
        {
            itemsCounter++;
        }
        previousGroupKey = currentGroupKey;

        foreach (var chartSerie in ChartCollection.Where(x => x.IsBasedOnStatistic))
        {
            ChartSeriesItem previousChartSeriesItem = null;
            ChartSeriesItem chartSeriesItem = null;

            if (!chartSeriesItems.ContainsKey(chartSerie))
            {
                chartSeriesItems.Add(chartSerie, new List<ChartSeriesItem>());
            }

            if (isNew)
            {
                chartSeriesItem = new ChartSeriesItem
                {
                    Format = chartSerie.Format,
                    Category = chartItemDataBuilder.GetValueFromGroupKey(currentGroupKey),
                    PointColor = chartSerie.ColorsPalette.PointColor,
                    TrackBallColor = chartSerie.ColorsPalette.TrackBallColor,
                    TooltipColor = chartSerie.ColorsPalette.TooltipColor,
                    TooltipForegroundColor = chartSerie.ColorsPalette.TooltipForeground
                };
                previousChartSeriesItem = chartSeriesItems[chartSerie].LastOrDefault();
                chartSeriesItems[chartSerie].Add(chartSeriesItem);
            }
            else
            {
                // Same group: previous and current point are the same (last) item.
                previousChartSeriesItem = chartSeriesItem = chartSeriesItems[chartSerie].LastOrDefault();
            }

            chartSerie.UpdateChartSeriesItemByStatistic?.Invoke(chartSeriesItem, previousChartSeriesItem, stat);
        }
    }
}
/// <summary>
/// Adds a new materia to the collection, assigning it the next available ID.
/// </summary>
/// <param name="nuevo">The materia to add; its <c>ID</c> is overwritten.</param>
/// <returns>The stored materia with its newly assigned ID.</returns>
public ClassMateria NuevaMateria(ClassMateria nuevo)
{
    // DefaultIfEmpty(0) guards against Max() throwing InvalidOperationException
    // when the collection is empty; the first assigned ID is then 1.
    nuevo.ID = materias.Select(m => m.ID).DefaultIfEmpty(0).Max() + 1;
    materias.Add(nuevo);
    return nuevo;
}
/// <summary>
/// Calculate an FPS metric value from a sequence of frametimes (milliseconds).
/// </summary>
/// <param name="sequence">Frametimes in milliseconds.</param>
/// <param name="metric">Which metric to compute.</param>
/// <returns>The metric value, rounded to the configured number of digits.</returns>
public double GetFpsMetricValue(IList<double> sequence, EMetric metric)
{
    // Convert each frametime (ms) into an instantaneous frames-per-second value.
    IList<double> frameRates = sequence.Select(frametime => 1000 / frametime).ToList();

    double rawValue = metric switch
    {
        EMetric.Max => frameRates.Max(),
        EMetric.P99 => GetPQuantileSequence(frameRates, 0.99),
        EMetric.P95 => GetPQuantileSequence(frameRates, 0.95),
        // Average FPS = total frames * 1000 / total time in ms.
        EMetric.Average => sequence.Count * 1000 / sequence.Sum(),
        EMetric.Median => GetPQuantileSequence(frameRates, 0.5),
        EMetric.P5 => GetPQuantileSequence(frameRates, 0.05),
        EMetric.P1 => GetPQuantileSequence(frameRates, 0.01),
        EMetric.P0dot2 => GetPQuantileSequence(frameRates, 0.002),
        EMetric.P0dot1 => GetPQuantileSequence(frameRates, 0.001),
        // "Low" metrics are computed from the frametime tail, then inverted to FPS.
        EMetric.OnePercentLow => 1000 / GetPercentageHighSequence(sequence, 1 - 0.01),
        EMetric.ZerodotOnePercentLow => 1000 / GetPercentageHighSequence(sequence, 1 - 0.001),
        EMetric.Min => frameRates.Min(),
        EMetric.AdaptiveStd => GetAdaptiveStandardDeviation(frameRates, _options.IntervalAverageWindowTime),
        _ => double.NaN,
    };

    return Math.Round(rawValue, _options.FpsValuesRoundingDigits);
}
/// <summary>
/// Adds a product to the in-memory collection, assigning it the next available Id.
/// </summary>
/// <param name="product">The product to add; its <c>Id</c> is overwritten.</param>
/// <returns>201 Created with the stored product.</returns>
public ActionResult<Product> AddItem([FromBody] Product product)
{
    // DefaultIfEmpty(0) guards against Max() throwing InvalidOperationException
    // when the collection is empty; the first assigned Id is then 1.
    product.Id = products.Select(s => s.Id).DefaultIfEmpty(0).Max() + 1;
    products.Add(product);
    return Created("", product);
}
/// <summary>
/// Adds an employee to the collection, assigning the next available Id.
/// </summary>
/// <param name="employee">The employee to add; its <c>Id</c> is overwritten.</param>
/// <returns>The stored employee with its assigned Id.</returns>
public Employee AddEmployee(Employee employee)
{
    // DefaultIfEmpty(0) guards against Max() throwing InvalidOperationException
    // when the collection is empty; the first assigned Id is then 1.
    employee.Id = employees.Select(e => e.Id).DefaultIfEmpty(0).Max() + 1;
    employees.Add(employee);
    return employee;
}
/// <summary>
/// Adds an auther to the collection, assigning the next available id.
/// </summary>
/// <param name="element">The auther to add; its <c>id</c> is overwritten.</param>
public void Add(Auther element)
{
    // DefaultIfEmpty(0) guards against Max() throwing InvalidOperationException
    // when the collection is empty; the first assigned id is then 1.
    element.id = authers.Select(a => a.id).DefaultIfEmpty(0).Max() + 1;
    authers.Add(element);
}
/// <summary>
/// Initialize the search algorithm (build the keyword tree).
/// </summary>
/// <param name="ngrams">N-grams to search for.</param>
internal Searcher(IList<ngram_t<T>> ngrams)
{
    _Root = TreeNode.BuildTree(ngrams);

    // Record the length of the longest n-gram; an empty input yields zero.
    if (ngrams.Count > 0)
    {
        NgramMaxLength = ngrams.Max(item => item.words.Length);
    }
    else
    {
        NgramMaxLength = 0;
    }
}
/// <summary>
/// Performs one width/height optimization step: finds the widest layer that exceeds
/// the desired width and moves vertices from it into an adjacent (possibly newly
/// created) inserted layer.
/// </summary>
/// <returns>True when a step was performed; false when no further optimization is possible.</returns>
private bool DoWHOptimizationStep()
{
    // Target width derived from the current height and the configured aspect ratio.
    double desiredWidth = _actualHeight * Parameters.WidthPerHeight;

    // Find the widest non-empty layer that is still wider than desired.
    int maxWidthLayerIndex = 0;
    var maxWidthLayer = _whOptLayerInfos[0];
    for (int i = 0; i < _whOptLayerInfos.Count; i++)
    {
        if (_whOptLayerInfos[i].LayerWidth > maxWidthLayer.LayerWidth &&
            _whOptLayerInfos[i].Vertices.Count > 0 &&
            _whOptLayerInfos[i].LayerWidth > desiredWidth)
        {
            maxWidthLayer = _whOptLayerInfos[i];
            maxWidthLayerIndex = i;
        }
    }
    if (maxWidthLayer.LayerWidth <= desiredWidth || maxWidthLayer.Vertices.Count <= 0)
    {
        // Nothing is over budget; optimization is finished.
        return(false);
    }

    //get a layer nearby the maxWidthLayer
    int insertedLayerIndex = -1;
    WHOptimizationLayerInfo insertedLayerInfo = null;
    IList<SugiVertex> insertedLayer = null;
    if (maxWidthLayerIndex < _whOptLayerInfos.Count - 1 &&
        _whOptLayerInfos[maxWidthLayerIndex + 1].IsInsertedLayer &&
        _whOptLayerInfos[maxWidthLayerIndex + 1].LayerWidth < (desiredWidth - maxWidthLayer.Vertices.Peek().Cost))
    {
        // Reuse the next layer if it was previously inserted and still has room.
        // NOTE(review): in this branch no vertices appear to be moved before
        // returning true — confirm that reuse is completed elsewhere.
        insertedLayerIndex = maxWidthLayerIndex + 1;
        insertedLayerInfo = _whOptLayerInfos[insertedLayerIndex];
        insertedLayer = _layers[insertedLayerIndex];
    }
    else
    {
        //insert a new layer
        insertedLayerIndex = maxWidthLayerIndex + 1;

        // Estimate the new layer's base width from the dummy-edge spacing of the
        // neighbouring layers (average of up to two sides).
        double width = 0;
        double c = 0;
        if (insertedLayerIndex > 0)
        {
            foreach (var vertex in _layers[insertedLayerIndex - 1])
            {
                width += Math.Max(0, _graph.OutDegree(vertex) - 1) * Parameters.LayerDistance;
            }
            c += 1;
        }
        if (insertedLayerIndex < _layers.Count - 1)
        {
            foreach (var vertex in _layers[insertedLayerIndex])
            {
                width += Math.Max(0, _graph.OutDegree(vertex) - 1) * Parameters.LayerDistance;
            }
            c += 1;
        }
        if (c > 0)
        {
            width /= c;
        }

        // Abort when the estimated layer would not even fit the cheapest vertex.
        if (width >= (desiredWidth - _whOptLayerInfos[insertedLayerIndex - 1].Vertices.Peek().Cost))
        {
            return(false);
        }

        insertedLayerInfo = new WHOptimizationLayerInfo();
        insertedLayerInfo.LayerWidth = width;
        insertedLayer = new List<SugiVertex>();
        _whOptLayerInfos.Insert(insertedLayerIndex, insertedLayerInfo);
        _layers.Insert(insertedLayerIndex, insertedLayer);

        // Greedily move vertices from the overfull layer into the new layer until
        // the widths balance or the budget is exhausted.
        double height = 0.0;
        while (insertedLayerInfo.LayerWidth < _whOptLayerInfos[insertedLayerIndex - 1].LayerWidth &&
               _whOptLayerInfos[insertedLayerIndex - 1].Vertices.Count > 0 &&
               insertedLayerInfo.LayerWidth <= (desiredWidth - _whOptLayerInfos[insertedLayerIndex - 1].Vertices.Peek().Cost))
        {
            var repositionedVertex = _whOptLayerInfos[insertedLayerIndex - 1].Vertices.Dequeue();
            insertedLayerInfo.LayerWidth += repositionedVertex.Cost;
            _whOptLayerInfos[insertedLayerIndex - 1].LayerWidth -= repositionedVertex.Value;
            _layers[insertedLayerIndex - 1].Remove(repositionedVertex.Vertex);
            insertedLayer.Add(repositionedVertex.Vertex);
            height = Math.Max(height, repositionedVertex.Vertex.Size.Height);
        }

        // The new layer adds to the total height.
        _actualHeight += height + Parameters.LayerDistance;
        _actualWidth = _whOptLayerInfos.Max(li => li.LayerWidth);
    }
    return(true);
}
/// <summary>
/// Adds an author to the collection, assigning the next available Id.
/// </summary>
/// <param name="entity">The author to add; its <c>Id</c> is overwritten.</param>
public void Add(Author entity)
{
    // DefaultIfEmpty(0) guards against Max() throwing InvalidOperationException
    // when the collection is empty; the first assigned Id is then 1.
    entity.Id = authors.Select(b => b.Id).DefaultIfEmpty(0).Max() + 1;
    authors.Add(entity);
}
/// <summary>
/// Hurwicz-style criterion: blends the worst (minimum) and best (maximum) outcome,
/// weighted by the pessimism coefficient (1 = fully pessimistic, 0 = fully optimistic).
/// </summary>
/// <param name="values">Candidate outcome values; must be non-empty.</param>
/// <param name="pessimismCoefficient">Weight given to the worst outcome.</param>
/// <returns>The weighted blend of the minimum and maximum values.</returns>
private static double GetCaseCriterion(IList<double> values, double pessimismCoefficient)
{
    double worstOutcome = values.Min();
    double bestOutcome = values.Max();
    return pessimismCoefficient * worstOutcome + (1 - pessimismCoefficient) * bestOutcome;
}
/// <summary>Returns the largest element currently held in the underlying list.</summary>
public T Max() => list.Max();
/// <summary>
/// Handles a drag-and-drop of card view models between two list boxes,
/// either preserving the original card ordering or inserting at the drop position.
/// </summary>
/// <param name="dropInfo">Drag/drop state supplied by the drag-drop framework.</param>
void IDropTarget.Drop(DropInfo dropInfo)
{
    if (dropInfo.DragInfo == null)
    {
        return;
    }

    ListBox lbSource = dropInfo.DragInfo.VisualSource as ListBox;
    ListBox lbTarget = dropInfo.VisualTarget as ListBox;

    int insertIndex = dropInfo.InsertIndex;
    IList<CardViewModel> sourceList = null;
    IList<CardViewModel> destinationList = GetList(dropInfo.TargetCollection);
    IEnumerable<CardViewModel> data = ExtractData(dropInfo.Data);

    // Only treat this as a same-kind move when both list boxes share a view-model type.
    if (lbSource != null && lbTarget != null && lbSource.DataContext.GetType() == lbTarget.DataContext.GetType())
    {
        sourceList = GetList(dropInfo.DragInfo.SourceCollection);
    }

    if (this.PreserveSourceOrdering)
    {
        // Re-insert each dragged card at the position dictated by its OriginalIndex,
        // ignoring the literal drop position.
        // NOTE(review): sourceList can still be null here (type check above failed) —
        // sourceList.Remove would then throw; confirm this mode is only used for same-kind drops.
        foreach (CardViewModel o in data)
        {
            sourceList.Remove(o);
            insertIndex = -1;
            if (destinationList.Count > 0)
            {
                // Index of the last destination card whose OriginalIndex precedes this card's.
                insertIndex = destinationList.Max(cvm => cvm.OriginalIndex < o.OriginalIndex ? destinationList.IndexOf(cvm) : -1);
            }
            if (insertIndex == -1)
            {
                destinationList.Insert(0, o);
            }
            else if (insertIndex == destinationList.Count - 1)
            {
                destinationList.Add(o);
            }
            else
            {
                destinationList.Insert(insertIndex + 1, o);
            }
        }
    }
    else
    {
        // Plain positional drop: remove from the source first, adjusting the insert
        // index when removing from earlier in the same list.
        if (sourceList != null)
        {
            foreach (CardViewModel o in data)
            {
                int index = sourceList.IndexOf(o);
                if (index != -1)
                {
                    sourceList.RemoveAt(index);
                    if (sourceList == destinationList && index < insertIndex)
                    {
                        --insertIndex;
                    }
                }
            }
        }
        foreach (CardViewModel o in data)
        {
            destinationList.Insert(insertIndex++, o);
        }
    }
}
/// <summary>
/// Wires the base CRUD adapter's create/update/delete/read delegates to an
/// in-memory list. Entities are shallow-copied field-by-field on create/update
/// so the stored instance is decoupled from the caller's instance.
/// </summary>
/// <param name="source">Backing list acting as the table.</param>
/// <param name="keyName">Name of the key property on <typeparamref name="T"/>.</param>
/// <param name="isIdentity">When true, the key is auto-assigned as max(existing)+1 on create.</param>
private InMemoryCrudAdapter(IList<T> source, string keyName, bool isIdentity)
    : base
    (
        /* create */
        (e) =>
        {
            var t = typeof(T);
            if (isIdentity)
            {
                // Build a compiled e => e.<keyName> selector to find the current max key.
                var param = Expression.Parameter(t, "e");
                var prop = Expression.Property(param, keyName);
                var selector = Expression.Lambda(prop, param);
                var nextId = (source.Any() ? source.Max((Func<T, int>)selector.Compile()) : 0) + 1;
                t.GetProperty(keyName).SetValue(e, nextId);
            }
            // Store a copy, not the caller's reference.
            var entry = new T();
            foreach (var field in _fieldList)
            {
                var f = t.GetProperty(field);
                f.SetValue(entry, f.GetValue(e));
            }
            source.Add(entry);
        },
        /* update */
        (e) =>
        {
            var entry = source.FirstOrDefault(CreatePredicate(e, keyName));
            if (entry != null)
            {
                // Copy every writable field except the key onto the stored entity.
                var t = typeof(T);
                foreach (var field in _fieldListWithoutKey)
                {
                    var f = t.GetProperty(field);
                    f.SetValue(entry, f.GetValue(e));
                }
            }
        },
        /* delete */
        (e) =>
        {
            var entry = source.FirstOrDefault(CreatePredicate(e, keyName));
            if (entry != null)
            {
                source.Remove(entry);
            }
        },
        /* read */
        (predicate) =>
        {
            return(source.Where(predicate.Compile()).AsQueryable());
        },
        /* read (raw SQL overload — unsupported in-memory, always empty) */
        (sql, parameters) =>
        {
            return(Enumerable.Empty<T>().AsQueryable());
        }
    )
{
    // Lazily compute the cached list of simple, writable property names once per T.
    if (_fieldList == null)
    {
        _fieldList = typeof(T).GetProperties().Where(p => p.PropertyType.IsSealed && p.GetAccessors().Any(a => !(a.IsVirtual && !a.IsFinal) && a.ReturnType == typeof(void))).Select(p => p.Name).ToList();
        _fieldListWithoutKey = _fieldList.Where(f => !string.Equals(f, keyName, StringComparison.OrdinalIgnoreCase)).ToList();
    }
}
/// <summary>
/// Incrementally calculates the TRIX indicator, continuing from previously
/// computed values in <paramref name="last"/> and appending results for newer prices.
/// </summary>
/// <param name="last">Previously computed index rows (may be empty, triggering a full recalculation).</param>
/// <param name="priceList">Price history.</param>
/// <param name="parameter">Expects "c1" (EMA period N) and "c2" (TRMA moving-average period m).</param>
/// <returns>Index rows for dates newer than the last computed date.</returns>
public IList<IndexData> Calculate(IList<IndexData> last, IList<PriceInfo> priceList, System.Collections.Specialized.NameValueCollection parameter)
{
    var me = this;
    var tech = new List<IndexData>();

    // Algorithm:
    // 1. Compute AX, the N-day exponential average of the closing price:
    //    AX = close(day I) * 2 / (N+1) + AX(day I-1) * (N-1) / (N+1)
    // 2. Compute BX, the N-day exponential average of AX:
    //    BX = AX(day I) * 2 / (N+1) + BX(day I-1) * (N-1) / (N+1)
    // 3. Compute TRIX from the N-day exponential average of BX:
    //    TR = BX(day I) * 2 / (N+1) + TR(day I-1) * (N-1) / (N+1)
    // 4. Compute TRMA, the m-day moving average of TRIX.
    List<double> ax = new List<double>(), bx = new List<double>(), tr = new List<double>(), trix = new List<double>(), trma = new List<double>();
    List<double> axEma = new List<double>(), bxEma = new List<double>(), trEma = new List<double>();
    int c1 = int.Parse(parameter["c1"] + "");
    int c2 = int.Parse(parameter["c2"] + "");
    var x = 0;
    List<PriceInfo> data = null;
    int takeCount = 0;

    if (last.Count <= 0)
    {
        // No history to continue from: fall back to a full recalculation.
        return(Calculate(priceList, parameter));
    }
    else
    {
        // Only prices newer than the last computed date need processing.
        DateTime maxTime = last.Max(p => p.date);
        data = priceList.Where(p => p.date > maxTime).ToList();
        if (data.Count == 0)
        {
            return(new List<IndexData>());
        }
        // NOTE(review): when last.Count < c2 this takes data.Count (new rows) instead of
        // last.Count — looks suspicious, confirm intent before changing.
        takeCount = last.Count < c2 ? data.Count : c2;
        // Take the most recent takeCount rows at or before the last computed date as seed context.
        IList<PriceInfo> oldData = priceList.Where(p => p.date <= maxTime).OrderByDescending(p => p.date).Take(takeCount).ToList();
        data.AddRange(oldData);
        // Restore chronological order.
        data = data.OrderBy(p => p.date).ToList();
        IList<IndexData> ixs = last
            .OrderByDescending(p => p.date)
            .Take(takeCount)
            .OrderBy(p => p.date).ToList();
        // Seed every working series from the previously computed rows
        // (slot order: trix, trma, ax, axEma, bx, bxEma, tr, trEma).
        foreach (IndexData ix in ixs)
        {
            trix.Add(ix[0]);
            trma.Add(ix[1]);
            ax.Add(ix[2]);
            axEma.Add(ix[3]);
            bx.Add(ix[4]);
            bxEma.Add(ix[5]);
            tr.Add(ix[6]);
            trEma.Add(ix[7]);
        }
    }

    // First smoothing pass: EMA of the closing price.
    for (int i = takeCount; i < data.Count; i++)
    {
        var a = me.getEma((double)data[i].price, ax[i - 1], c1, i);
        ax.Add(a);
        var y = me.getEmaAvg(ax, c1, i);
        axEma.Add(y);
    }

    // Second smoothing pass: EMA of AX.
    //bx.Add(ax[0]);
    //bxEma.Add(axEma[0]);
    for (int i = takeCount; i < axEma.Count; i++)
    {
        var a = me.getEma(axEma[i], bx[i - 1], c1, i);
        bx.Add(a);
        var y = me.getEmaAvg(bx, c1, i);
        bxEma.Add(y);
    }

    // Third smoothing pass: EMA of BX.
    //tr.Add(bx[0]);
    //trEma.Add(bxEma[0]);
    for (int i = takeCount; i < bxEma.Count; i++)
    {
        var a = me.getEma(bxEma[i], tr[i - 1], c1, i);
        tr.Add(a);
        var y = me.getEmaAvg(tr, c1, i);
        trEma.Add(y);
    }

    // TRIX: percentage rate of change of the triple-smoothed EMA.
    //trix.Add(0);
    for (int i = takeCount; i < trEma.Count; i++)
    {
        double z = (trEma[i] - trEma[i - 1]) / trEma[i - 1] * 100;
        trix.Add(z);
    }

    // TRMA: m-day moving average of TRIX.
    for (int i = takeCount; i < trix.Count; i++)
    {
        var y = me.getAvg2(c2, trix, i);
        trma.Add(y);
    }

    // Emit only the newly computed rows (the seed rows were already returned previously).
    for (int i = takeCount; i < data.Count; i++)
    {
        tech.Add(new IndexData(data[i].date) { trix[i], trma[i], ax[i], axEma[i], bx[i], bxEma[i], tr[i], trEma[i] });
    }
    return(tech);
}
/// <summary>
/// Adds a new product to the collection, assigning it the next available Id.
/// </summary>
/// <param name="newProduct">The product to add; its <c>Id</c> is overwritten.</param>
/// <returns>The stored product with its assigned Id.</returns>
public Product Create(Product newProduct)
{
    // DefaultIfEmpty(0) guards against Max() throwing InvalidOperationException
    // when the collection is empty; the first assigned Id is then 1.
    newProduct.Id = _products.Select(product => product.Id).DefaultIfEmpty(0).Max() + 1;
    _products.Add(newProduct);
    return newProduct;
}
/// <summary>
/// Renders all letter boxes side by side onto a transparent bitmap at real scale.
/// The bitmap is wide enough for every box plus Offset gaps, and tall enough for the
/// tallest box plus top/bottom margins. Caller owns the returned image.
/// </summary>
private Image DrawSurfaceInRealScale()
{
    var buffer = new Bitmap(m_letters.Sum(x => x.Rect.Width) + (m_letters.Count + 1) * Offset,
                            m_letters.Max(x => x.Rect.Height) + Offset * 2);
    {
        using (var gfx = Graphics.FromImage(buffer))
        {
            gfx.Clear(Color.Transparent);
            gfx.TextRenderingHint = TextRenderingHint.SingleBitPerPixelGridFit;
            foreach (var letterBox in m_letters)
            {
                // Box background plus a border colored by selection state.
                gfx.FillRectangle(Brushes.White, letterBox.Rect);
                gfx.DrawRectangle(letterBox.IsSelected ? Pens.DeepSkyBlue :
                                  letterBox.IsPreSelected ? Pens.LightBlue :
                                  Pens.LightGray,
                                  letterBox.Rect);
                if (string.IsNullOrEmpty(letterBox.Letter))
                {
                    continue;
                }
                try
                {
                    gfx.DrawString(letterBox.Letter, m_font, Brushes.Black,
                                   letterBox.Rect.X + letterBox.LetterOffsetX,
                                   letterBox.Rect.Y + letterBox.LetterOffsetY,
                                   StringFormat.GenericTypographic);
                }
                catch
                {
                    // Ignore — a failed glyph draw leaves the box empty rather than aborting the surface.
                }
            }
        }
        return(buffer);
    }
}
/// <summary>
/// Renders the repository history graph (commit boxes, branch/tag/remote labels and
/// connecting links) onto the canvas. The work is queued on the dispatcher at
/// background priority; fires GraphLoaded when done.
/// </summary>
/// <param name="tracker">Git status tracker providing the repository graph; may be null/non-git.</param>
/// <param name="scroll">When true, resets zoom/pan and scrolls to the right end after rendering.</param>
internal void Show(GitFileStatusTracker tracker, bool scroll)
{
    this.tracker = tracker;
    //loading.Visibility = Visibility.Visible;

    Action action = () =>
    {
        try
        {
            IList<GraphNode> commits = null;
            string hash = null;
            if (tracker != null && tracker.IsGit)
            {
                commits = tracker.RepositoryGraph.Nodes;
                hash = GetHashCode(commits);
            }

            // Change detection is currently disabled: always redraw.
            bool changed = true; // lastHash == null ? hash != null : !lastHash.Equals(hash);

            if (changed)
            {
                lastHash = hash;
                canvasContainer.Children.Clear();
                maxX = maxY = 0;

                if (changed && commits != null && commits.Any())
                {
                    maxX = commits.Count();
                    maxY = commits.Max(c => c.X);

                    // Draw commits newest-position-last so earlier items sit underneath.
                    for (int i = commits.Count() - 1; i >= 0; i--)
                    {
                        var commit = commits[i];

                        #region Add commit box
                        var box = new CommitBox();
                        box.DataContext = new
                        {
                            Id = commit.Id,
                            ShortId = commit.ShortId,
                            Comments = commit.Message,
                            Author = commit.AuthorName,
                            Date = commit.AuthorDateRelative,
                            Refs = commit.Refs
                        };

                        // Graph coordinates are transposed onto the screen:
                        // the node's Y becomes horizontal position, X becomes vertical.
                        double left = GetScreenX(maxX - commit.Y);
                        double top = GetScreenY(commit.X);

                        Canvas.SetLeft(box, left);
                        Canvas.SetTop(box, top);
                        Canvas.SetZIndex(box, 10);
                        this.canvasContainer.Children.Add(box);
                        #endregion

                        #region Add Branches
                        var m = 0;
                        foreach (var head in commit.Refs.Where(r => r.Type == RefTypes.Branch || r.Type == RefTypes.HEAD))
                        {
                            var control = new CommitHead
                            {
                                DataContext = head,
                            };
                            Canvas.SetLeft(control, left + CommitBox.WIDTH + 4);
                            Canvas.SetTop(control, top + m++ * 30);
                            this.canvasContainer.Children.Add(control);
                        }
                        #endregion

                        #region Add Tags
                        m = 0;
                        foreach (var tag in commit.Refs.Where(r => r.Type == RefTypes.Tag))
                        {
                            var control = new CommitTag
                            {
                                DataContext = tag,
                            };
                            Canvas.SetLeft(control, left + m++ * 80); // TODO: get width of the control
                            Canvas.SetTop(control, top - 24);
                            this.canvasContainer.Children.Add(control);
                        }
                        #endregion

                        #region Add Remote Branches
                        m = 0;
                        foreach (var name in commit.Refs.Where(r => r.Type == RefTypes.RemoteBranch))
                        {
                            var control = new CommitRemote
                            {
                                DataContext = new { Text = name },
                            };
                            Canvas.SetLeft(control, left + m++ * 100); // TODO: get width of the control
                            Canvas.SetTop(control, top + CommitBox.HEIGHT + 4);
                            this.canvasContainer.Children.Add(control);
                        }
                        #endregion
                    }

                    #region Add commit links
                    var links = tracker.RepositoryGraph.Links;

                    foreach (var link in links)
                    {
                        // current node
                        double x1 = link.Y1;
                        double y1 = link.X1;
                        // parent node
                        double x2 = link.Y2;
                        double y2 = link.X2;

                        // Whether another link would overlap this curve, forcing the mirrored bend.
                        bool flip = links.Any(lnk => lnk.X1 == x2 && lnk.Y2 == y2 && lnk.X1 == lnk.X2);

                        x1 = GetScreenX(maxX - x1);
                        y1 = GetScreenY(y1) + CommitBox.HEIGHT / 2;
                        x2 = GetScreenX(maxX - x2) + CommitBox.WIDTH;
                        y2 = GetScreenY(y2) + CommitBox.HEIGHT / 2;

                        if (y1 == y2)
                        {
                            // Same lane: a straight connector is enough.
                            var line = new Line
                            {
                                Stroke = new SolidColorBrush(Color.FromArgb(255, 153, 182, 209)),
                                StrokeThickness = 4,
                            };
                            line.X1 = x1;
                            line.Y1 = y1;
                            line.X2 = x2;
                            line.Y2 = y2;
                            this.canvasContainer.Children.Add(line);
                        }
                        else if (y1 > y2 && !flip)
                        {
                            // Bend near the parent: straight segment, then a Bezier curve up.
                            var x3 = x2 - CommitBox.WIDTH / 2;
                            var path = new System.Windows.Shapes.Path
                            {
                                Stroke = new SolidColorBrush(Color.FromArgb(255, 153, 182, 209)),
                                StrokeThickness = 4,
                            };
                            PathSegmentCollection pscollection = new PathSegmentCollection();
                            pscollection.Add(new LineSegment(new Point(x2, y1), true));
                            BezierSegment curve = new BezierSegment(
                                new Point(x2, y1), new Point(x3, y1), new Point(x3, y2), true);
                            pscollection.Add(curve);
                            PathFigure pf = new PathFigure
                            {
                                StartPoint = new Point(x1, y1),
                                Segments = pscollection,
                            };
                            PathFigureCollection pfcollection = new PathFigureCollection();
                            pfcollection.Add(pf);
                            PathGeometry pathGeometry = new PathGeometry();
                            pathGeometry.Figures = pfcollection;
                            path.Data = pathGeometry;
                            this.canvasContainer.Children.Add(path);
                        }
                        else
                        {
                            // Bend near the child: Bezier curve first, then a straight segment.
                            var x3 = x1 + CommitBox.WIDTH / 2;
                            var path = new System.Windows.Shapes.Path
                            {
                                Stroke = new SolidColorBrush(Color.FromArgb(255, 153, 182, 209)),
                                StrokeThickness = 4,
                            };
                            PathSegmentCollection pscollection = new PathSegmentCollection();
                            BezierSegment curve = new BezierSegment(
                                new Point(x3, y1), new Point(x3, y2), new Point(x1, y2), true);
                            pscollection.Add(curve);
                            pscollection.Add(new LineSegment(new Point(x2, y2), true));
                            PathFigure pf = new PathFigure
                            {
                                StartPoint = new Point(x3, y1),
                                Segments = pscollection,
                            };
                            PathFigureCollection pfcollection = new PathFigureCollection();
                            pfcollection.Add(pf);
                            PathGeometry pathGeometry = new PathGeometry();
                            pathGeometry.Figures = pfcollection;
                            path.Data = pathGeometry;
                            this.canvasContainer.Children.Add(path);
                        }
                    }
                    #endregion
                }
                AdjustCanvasSize();
            }

            if (scroll)
            {
                //this.Scaler.ScaleX = this.Scaler.ScaleY = 1;
                //AdjustCanvasSize();
                this.zoomAndPanControl.ContentScale = 1;
                this.canvasContainer.SetValue(Canvas.LeftProperty, 0.0);
                this.canvasContainer.SetValue(Canvas.TopProperty, 0.0);
                this.scrollRoot.ScrollToRightEnd();
            }
        }
        catch (Exception ex)
        {
            Log.WriteLine("History Graph Show: {0}", ex.ToString());
        }
        HistoryViewCommands.GraphLoaded.Execute(null, this);
    };

    this.Dispatcher.BeginInvoke(action, DispatcherPriority.Background);
}
/// <summary>
/// Returns the largest value stored in _values.
/// NOTE(review): despite the name, this returns the maximum *value*, not the
/// position of the maximum element — confirm callers expect the value.
/// </summary>
public int MaxIndex() => _values.Max();
/// <summary>
/// Adds the item to the store, assigns it the next sequential Id, and persists.
/// </summary>
/// <param name="item">The item to add; its <c>Id</c> is overwritten.</param>
public void Add(TItem item)
{
    // Fix: compute the next Id from the items already stored BEFORE adding the new one.
    // The original added first, so the new item's own caller-supplied Id took part in
    // the Max and could skew the assigned value. DefaultIfEmpty(0) also guards against
    // Max() throwing on an empty store; the first assigned Id is then 1.
    item.Id = items.Select(s => s.Id).DefaultIfEmpty(0).Max() + 1;
    items.Add(item);
    Save();
}