/// <inheritdoc />
public IResultAggregator GetAggregator(Aggregation aggregation)
{
    // Single TryGetValue instead of ContainsKey + indexer: avoids hashing the key twice.
    if (Aggregations.Value.TryGetValue(aggregation, out var aggregator))
    {
        return aggregator;
    }

    // No aggregator registered for this aggregation type; use the configured fallback.
    return _fallbackAggregation;
}
public IResultAggregator GetAggregator(Aggregation aggregation)
{
    // 'Original' results bypass aggregation; everything else collapses to a single value.
    return aggregation == Aggregation.Original
        ? new OriginalResult()
        : new SingleOrDefault();
}
public static void CanAggregateLettersUsingChunkWhile()
{
    // Keys 0, 10 and 11: a new chunk starts whenever the key gap reaches 10.
    var letters = new SeriesBuilder <int, char> { { 0, 'a' }, { 10, 'b' }, { 11, 'c' } }.Series;

    // Key each chunk by its first key; the value is the chunk's letters joined into a string.
    var result = letters.Aggregate(
        Aggregation.ChunkWhile <int>((k1, k2) => k2 - k1 < 10),
        chunk => chunk.Data.Keys.First(),
        chunk => new OptionalValue <string>(new string(chunk.Data.Values.ToArray())));

    var expected = new SeriesBuilder <int, string> { { 0, "a" }, { 10, "bc" } }.Series;
    Assert.AreEqual(expected, result);
}
public AggDimension(AggregationDimension aggDim, string sCorrectAggregationDesignName, string sReportTitle, bool bHighlightRow)
{
    // Walk up to the owning aggregation to capture its identifying names.
    Aggregation agg = aggDim.Parent;

    mAggName = agg.Name;
    mAggDesignName = sCorrectAggregationDesignName;
    mMeasureGroupName = agg.ParentMeasureGroup.Name;
    mDatabaseName = agg.ParentDatabase.Name;
    mDimension = aggDim.CubeDimension.Name;
    mHighlightRow = bHighlightRow;

    // Prefer an explicit report title; otherwise fall back to the cube name.
    mCubeNameOrReportTitle = sReportTitle ?? agg.ParentCube.Name;

    // Build a CRLF-separated list of the aggregation dimension's attribute names.
    mAttributes = string.Empty;
    foreach (AggregationAttribute attr in aggDim.Attributes)
    {
        if (mAttributes.Length > 0)
        {
            mAttributes += "\r\n";
        }
        mAttributes += attr.Attribute.Name;
    }
}
public Sql_Select_Attr(String val1, String val2, Boolean hasTable, String aggregation, Boolean hasAggregation)
{
    this.hasTable = hasTable;
    this.hasAggregation = hasAggregation;

    // Validate/normalize the aggregation keyword only when one was parsed.
    if (this.hasAggregation)
    {
        this.aggregation = getMatchedAggregatioinOrRaiseException(aggregation);
    }

    if (hasTable)
    {
        // Qualified form "table.column": val1 is the table alias, val2 the column name.
        this.tableAlias = val1;
        this.name = val2;
    }
    else
    {
        // Unqualified form: val1 is the column name.
        this.name = val1;
    }
}
private void AssertColumSelectionIsValid(int index, string expectedName, Type expectedType, string expectedAlias = null, Aggregation expectedAggregation = Aggregation.None)
{
    // The command must have produced at least one column selection.
    var columns = this.Command.Specification.Columns;
    columns.Should().NotBeEmpty();

    // Verify every captured facet of the selection at the requested position.
    var column = columns[index];
    column.Identifier.Should().Be(expectedName);
    column.Alias.Should().Be(expectedAlias);
    column.EntityType.Should().Be(expectedType);
    column.Aggregation.Should().Be(expectedAggregation);
}
/// <summary>
/// Overlays the summed happiness score of all "Happiness"-dominant faces onto the image.
/// </summary>
/// <param name="baseImage">The image to annotate.</param>
/// <param name="faces">Detected faces with emotion attributes; may be null.</param>
/// <returns>The annotated image, or the original image when no faces are provided.</returns>
public static BitmapSource DrawScore(BitmapSource baseImage, Face[] faces)
{
    // Nothing to draw without face data.
    if (faces == null)
    {
        return baseImage;
    }

    double acumulador = 0;
    Action <DrawingContext, double> drawAction = (drawingContext, annotationScale) =>
    {
        // 'faces' is guaranteed non-null here (checked before the closure is created),
        // so the original inner null re-check was removed.
        // Sum the happiness score of every face whose dominant emotion is happiness.
        for (int i = 0; i < faces.Length; i++)
        {
            var emotionDominat = Aggregation.GetDominantEmotion(faces[i].FaceAttributes.Emotion);
            if (emotionDominat.Item1 == "Happiness")
            {
                acumulador += faces[i].FaceAttributes.Emotion.Happiness;
            }
        }

        // NOTE(review): 'acumulador' is captured by the closure, so if DrawOverlay ever
        // invokes the action more than once the total keeps accumulating — confirm intended.
        FormattedText ft = new FormattedText(acumulador.ToString(), CultureInfo.CurrentCulture, FlowDirection.LeftToRight, s_typeface, 50, Brushes.Black);
        var origin = new System.Windows.Point(10, 10);
        drawingContext.DrawText(ft, origin);
    };

    return DrawOverlay(baseImage, drawAction);
}
// Verifies that compaction rules can be added to and removed from a time series,
// and that TS.INFO reflects the cumulative rule list after every mutation.
public async Task TestRulesAdditionDeletion()
{
    var key = CreateKeyName();
    var db = redisFixture.Redis.GetDatabase();
    await db.TimeSeriesCreateAsync(key);

    // Create one destination series per aggregation type.
    foreach (var aggregation in Aggregation.GetEnumerator())
    {
        await db.TimeSeriesCreateAsync($"{key}:{aggregation.Name}");
    }

    var timeBucket = 50L;
    var rules = new List <TimeSeriesRule>();
    var rulesMap = new Dictionary <Aggregation, TimeSeriesRule>();

    // Add one rule per aggregation; after each add, the server's rule list must
    // equal the locally tracked 'rules' list.
    foreach (var aggregation in Aggregation.GetEnumerator())
    {
        var rule = new TimeSeriesRule($"{key}:{aggregation.Name}", timeBucket, aggregation);
        rules.Add(rule);
        rulesMap[aggregation] = rule;
        Assert.True(await db.TimeSeriesCreateRuleAsync(key, rule));
        var info = await db.TimeSeriesInfoAsync(key);
        Assert.Equal(rules, info.Rules);
    }

    // Delete each rule; after each delete, the server's rule list must shrink in step.
    foreach (var aggregation in Aggregation.GetEnumerator())
    {
        var rule = rulesMap[aggregation];
        rules.Remove(rule);
        Assert.True(await db.TimeSeriesDeleteRuleAsync(key, rule.DestKey));
        var info = await db.TimeSeriesInfoAsync(key);
        Assert.Equal(rules, info.Rules);
    }

    // Clean up the destination keys created for this test.
    await db.KeyDeleteAsync(Aggregation.GetEnumerator().Select(i => (RedisKey)$"{key}:{i.Name}").ToArray());
}
protected virtual Aggregation GetAttributeAggregation(AttributeFilter attributeFilter, IList <AggregationResponse> aggregationResponses)
{
    var fieldName = attributeFilter.Key;

    // Find the search response whose id matches the filter's field (invariant compare).
    var aggregationResponse = aggregationResponses.FirstOrDefault(a => a.Id.EqualsInvariant(fieldName));
    if (aggregationResponse == null)
    {
        return null;
    }

    IList <AggregationResponseValue> aggregationResponseValues;
    if (attributeFilter.Values.IsNullOrEmpty())
    {
        // No filter values configured: expose every value the search returned.
        aggregationResponseValues = aggregationResponse.Values;
    }
    else
    {
        // Only expose the predefined values (de-duplicated by id, case-insensitive)
        // that the search actually returned.
        aggregationResponseValues = attributeFilter.Values
            .GroupBy(v => v.Id, StringComparer.OrdinalIgnoreCase)
            .Select(g => aggregationResponse.Values.FirstOrDefault(v => v.Id.EqualsInvariant(g.Key)))
            .Where(v => v != null)
            .ToArray();
    }

    if (!aggregationResponseValues.Any())
    {
        return null;
    }

    return new Aggregation
    {
        AggregationType = "attr",
        Field = fieldName,
        Items = GetAttributeAggregationItems(aggregationResponseValues).ToArray(),
    };
}
/// <summary>
/// Returns a copy of the series in which every missing (NAN) value is replaced by the
/// given aggregation (e.g. mean) computed over the non-missing values only.
/// </summary>
/// <param name="aggValue">The aggregation used to compute the replacement value.</param>
/// <returns>A new series with the same index and name, with missing values filled in.</returns>
public Series FillNA(Aggregation aggValue)
{
    // Compute the replacement value once, over non-missing entries only.
    var replacedValue = DataFrame._calculateAggregation(this._data.Where(x => x != DataFrame.NAN), aggValue, this._type);

    var dat = new List <object>();
    var indLst = new List <object>();
    for (int i = 0; i < this._data.Count; i++)
    {
        // The index is preserved either way; only missing values are substituted
        // (the original duplicated the index append in both branches).
        dat.Add(this[i] != DataFrame.NAN ? this[i] : replacedValue);
        indLst.Add(this._index[i]);
    }

    return new Series(dat, indLst, this.Name);
}
// Protobuf-generated-style hash: XOR-combines the hashes of all non-default fields,
// so two messages that compare equal produce the same hash.
public override int GetHashCode()
{
    int hash = 1;
    if (Name.Length != 0)
    {
        hash ^= Name.GetHashCode();
    }
    if (Aggregation != 0)
    {
        hash ^= Aggregation.GetHashCode();
    }
    if (Value != 0D)
    {
        // Bitwise comparer keeps the hash consistent with protobuf's bitwise double equality.
        hash ^= pbc::ProtobufEqualityComparers.BitwiseDoubleEqualityComparer.GetHashCode(Value);
    }
    if (_unknownFields != null)
    {
        hash ^= _unknownFields.GetHashCode();
    }
    return(hash);
}
/// <summary>
/// Gets the value that matches the specified aggregation type.
/// </summary>
/// <param name="aggregation">The aggregation type.</param>
/// <param name="defaultIfNull">The value to return if the matching value is null.</param>
/// <returns>The value that matches the specified aggregation type.</returns>
/// <exception cref="ArgumentException">Thrown for an unrecognized aggregation type.</exception>
public double GetValue(Aggregation aggregation, double defaultIfNull = 0)
{
    // Pick the matching nullable value first, then coalesce once at the end.
    double? selected;
    switch (aggregation)
    {
        case Aggregation.Average:
            selected = this.Average;
            break;
        case Aggregation.Minimum:
            selected = this.Minimum;
            break;
        case Aggregation.Maximum:
            selected = this.Maximum;
            break;
        case Aggregation.Total:
            selected = this.Total;
            break;
        case Aggregation.Count:
            selected = this.Count;
            break;
        default:
            throw new ArgumentException($"Unknown aggregation type: {aggregation}", nameof(aggregation));
    }
    return selected ?? defaultIfNull;
}
public static void CanAggregateLettersUsingFloatingWindow()
{
    // Letters 'a'..'j' keyed by 0..9.
    var letters = Enumerable.Range(0, 10)
        .Select(n => KeyValue.Create(n, (char)('a' + n)))
        .ToSeries();

    // Sliding windows of five elements; incomplete boundary windows are skipped.
    var actual = letters.Aggregate(
        Aggregation.WindowSize <int>(5, Boundary.Skip),
        window => window.Data.Keys.First(),
        window => new string(window.Data.Values.ToArray()));

    var expected = new SeriesBuilder <int, string>
    {
        { 0, "abcde" }, { 1, "bcdef" }, { 2, "cdefg" },
        { 3, "defgh" }, { 4, "efghi" }, { 5, "fghij" }
    }.Series;
    Assert.AreEqual(expected, actual);
}
// Maps the requested aggregation onto the corresponding LINQ reduction over Amount.
private static float Aggregate(IGrouping <string, Transaction> period, Aggregation aggregation)
{
    switch (aggregation)
    {
        case Aggregation.Count:
            return period.Count();
        case Aggregation.Avg:
            return period.Average(transaction => transaction.Amount);
        case Aggregation.Sum:
            return period.Sum(transaction => transaction.Amount);
        case Aggregation.Min:
            return period.Min(transaction => transaction.Amount);
        case Aggregation.Max:
            return period.Max(transaction => transaction.Amount);
        default:
            throw new InvalidOperationException("Unknown aggregation.");
    }
}
// Encodes the aggregation as a comma-separated bitmap: one "0"/"1" per cube attribute,
// grouped per measure-group dimension, trailing comma trimmed.
private string ConvertAggToSting(Aggregation agg)
{
    string outStr = "";
    MeasureGroup mg1 = agg.ParentMeasureGroup;

    foreach (MeasureGroupDimension mgDim in mg1.Dimensions)
    {
        AggregationDimension aggDim = agg.Dimensions.Find(mgDim.CubeDimensionID);
        foreach (CubeAttribute cubeDimAttr in mgDim.CubeDimension.Attributes)
        {
            // "1" when the attribute participates in the aggregation, "0" otherwise;
            // a dimension absent from the agg contributes all zeros.
            AggregationAttribute aggAttr = aggDim == null ? null : aggDim.Attributes.Find(cubeDimAttr.AttributeID);
            outStr = outStr + (aggAttr == null ? "0" : "1");
        }
        outStr = outStr + ",";
    }

    // Trim the trailing comma.
    return outStr.Substring(0, outStr.Length - 1);
}
public void ExcludeWallets_Test()
{
    // Arrange: a merged EUR total of 20 and two EUR wallets worth 1 + 2.
    var merged = new List <Money>()
    {
        new Money() { Currency = Model.Enums.Currency.EUR, Amount = 20 }
    };
    var wallets = new List <Wallet>()
    {
        new Wallet() { Name = "ssdsdsa", Money = new Money() { Currency = Model.Enums.Currency.EUR, Amount = 1 } },
        new Wallet() { Name = "poidg", Money = new Money() { Currency = Model.Enums.Currency.EUR, Amount = 2 } }
    };

    // Act: subtract the wallet amounts from the merged totals in place.
    Aggregation.ExcludeWallets(merged, wallets);

    // Assert: 20 - (1 + 2) = 17.
    Assert.AreEqual(17, merged[0].Amount);
}
/// <inheritdoc/>
public void Visit(Aggregation aggregation)
{
    Ensure.IsNotNull(aggregation, nameof(aggregation));

    // Without a primary aggregation there is nothing to translate.
    if (aggregation.PrimaryAggregation == null)
    {
        return;
    }

    // Visiting populates PrimaryAggregation.KustoQL, which is appended last below.
    aggregation.PrimaryAggregation.Accept(this);

    // TODO: do something with the sub aggregations to KQL
    if (aggregation.SubAggregations != null)
    {
        // Each sub-aggregation's KQL fragment is appended, comma-separated,
        // ahead of the primary aggregation's fragment.
        foreach (var(_, subAgg) in aggregation.SubAggregations)
        {
            subAgg.Accept(this);
            aggregation.KustoQL += $"{subAgg.KustoQL}, "; // this won't work when 2+ bucket aggregations are used!
        }
    }

    aggregation.KustoQL += aggregation.PrimaryAggregation.KustoQL;
}
/// <summary>
/// Asserts that the HAVING specification at <paramref name="index"/> carries the
/// expected aggregation, identifier, operator, value, entity type and alias.
/// </summary>
private void AssertHavingSpecificationIsValid <TEntity>(int index, Aggregation expectedAggregation, string expectedIdentifier, string expectedOperator, string expectedValue, string expectedAlias = null)
{
    this.Command.Specification.Havings.Should()
        .NotBeNullOrEmpty();

    // BUG FIX: the original always inspected Havings[0], silently ignoring 'index',
    // so assertions against any other position tested the wrong specification.
    var specification = this.Command.Specification.Havings[index];
    specification.Alias.Should()
        .Be(expectedAlias);
    specification.Aggregation.Should()
        .Be(expectedAggregation);
    specification.Identifier.Should()
        .Be(expectedIdentifier);
    specification.Operator.Should()
        .Be(expectedOperator);
    specification.EntityType.Should()
        .Be(typeof(TEntity));
    specification.Value.Should()
        .Be(expectedValue);
}
public void MergeWallets_OneArgument_AggregatedThatList()
{
    // Arrange: two wallets with the same name and currency, 4 EUR each.
    var wallets = new List <Wallet>()
    {
        new Wallet() { Name = "ddd", Money = new Money() { Currency = Model.Enums.Currency.EUR, Amount = 4 } },
        new Wallet() { Name = "ddd", Money = new Money() { Currency = Model.Enums.Currency.EUR, Amount = 4 } }
    };
    var expectedEur = new Money() { Currency = Model.Enums.Currency.EUR, Amount = 8 };

    // Act: merge into per-currency totals.
    var merged = Aggregation.MergeWallets(wallets);

    // Assert: exactly one EUR entry totalling 8.
    Assert.IsNotNull(merged);
    Assert.AreEqual(1, merged.Count);
    Assert.AreEqual(expectedEur.Currency, merged[0].Currency);
    Assert.AreEqual(expectedEur.Amount, merged[0].Amount);
}
public void AggregateSimple()
{
    //Arrange
    var source = new MemorySource <MyRow>();
    source.DataAsList = new List <MyRow>()
    {
        new MyRow { Id = 1, DetailValue = 3.5 },
        new MyRow { Id = 2, DetailValue = 4.5 },
        new MyRow { Id = 3, DetailValue = 2.0 },
    };

    // Sum DetailValue across all rows into a single aggregated row.
    var agg = new Aggregation <MyRow, MyAggRow>()
    {
        AggregationAction = (row, aggRow) => aggRow.AggValue += row.DetailValue
    };
    var dest = new MemoryDestination <MyAggRow>();

    //Act
    source.LinkTo(agg);
    agg.LinkTo(dest);
    source.Execute();
    dest.Wait();

    //Assert: 3.5 + 4.5 + 2.0 == 10
    Assert.Collection <MyAggRow>(dest.Data,
        ar => Assert.True(ar.AggValue == 10)
    );
}
// Renders a "label" span describing the group-by column and the applied aggregation.
private static HtmlBuilder GroupBy(
    this HtmlBuilder hb,
    Column groupBy,
    Column targetColumn,
    Aggregation aggregation)
{
    // Prefix with the group-by column label when grouping is active.
    var text = groupBy == null
        ? string.Empty
        : groupBy.GridLabelText + ": ";

    // Count needs no target column; other aggregation types name the aggregated column.
    if (aggregation.Type == Aggregation.Types.Count)
    {
        text += Displays.Get(aggregation.Type.ToString());
    }
    else
    {
        text += targetColumn.GridLabelText + " " + Displays.Get(aggregation.Type.ToString());
    }

    return hb.Span(css: "label", action: () => hb
        .Text(text: text));
}
// Maps a catalog aggregation item DTO onto the storefront model, resolving a
// language-specific label and specializing "__outline" groups into category items.
public static AggregationItem ToAggregationItem(this catalogDto.AggregationItem itemDto, Aggregation aggregationGroup, string currentLanguage)
{
    var result = new AggregationItem
    {
        Group = aggregationGroup,
        Value = itemDto.Value,
        IsApplied = itemDto.IsApplied ?? false,
        Count = itemDto.Count ?? 0,
        Lower = itemDto.RequestedLowerBound,
        Upper = itemDto.RequestedUpperBound,
    };

    // Prefer the label matching the current language, if any labels exist.
    if (itemDto.Labels != null)
    {
        result.Label = itemDto.Labels
            .FirstOrDefault(l => string.Equals(l.Language, currentLanguage, StringComparison.OrdinalIgnoreCase))
            ?.Label;
    }

    // Fall back to the raw value when no localized label was found.
    if (string.IsNullOrEmpty(result.Label) && itemDto.Value != null)
    {
        result.Label = itemDto.Value.ToString();
    }

    // Outline aggregations are represented by a specialized category item.
    if (aggregationGroup.Field.EqualsInvariant("__outline"))
    {
        result = CategoryAggregationItem.FromAggregationItem(result);
    }

    return result;
}
/// <summary>
/// Adds a HAVING specification built from the given selector and aggregation.
/// Implemented by derived command builders.
/// </summary>
/// <typeparam name="T">The entity type the selector applies to.</typeparam>
/// <param name="selector">Predicate expression for the HAVING clause.</param>
/// <param name="aggregation">The aggregation applied within the HAVING clause.</param>
/// <param name="alias">Optional alias; null when no alias is used.</param>
protected abstract void AddHavingSpecification <T>(
    Expression <Func <T, bool> > selector,
    Aggregation aggregation,
    string alias = null);
/// <summary>
/// Returns the estimated size of this aggregation. Use this signature which takes in the MeasureGroup when the agg is not attached to a ParentMeasureGroup.
/// </summary>
/// <param name="agg">The aggregation to size.</param>
/// <param name="mg1">The measure group; its EstimatedRows and measure definitions drive the estimate.</param>
/// <returns>Estimated size (and best-case minimum size) expressed as a fraction of the fact data.</returns>
public static EstimatedAggSize GetEstimatedSize(Aggregation agg, MeasureGroup mg1)
{
    double size = 0;
    double minSize = 0;
    bool bAggContainsAllGranularityAttributes = true;
    AggregationAttribute aggAttr;
    AggregationDimension aggDim;
    double dblAggCardinality = 1;
    long iNumSurrogateKeysInAgg = 0;
    int iMeasureGroupDimensionsCount = 0;
    long lngMaxAggDimensionCardinality = 0;

    // Estimate the aggregation's row count by multiplying per-dimension cardinalities.
    foreach (MeasureGroupDimension mgDim in mg1.Dimensions)
    {
        long iDimGranularityCardinality = 0;
        double dblDimAggCardinality = 1;
        bool bDimAggCardinalityFound = false;
        if (!(mgDim is RegularMeasureGroupDimension))
            continue; //m2m dimensions apparently aren't stored in the agg since they're calculated at runtime
        iMeasureGroupDimensionsCount++; //don't count m2m dimensions

        // Locate the granularity attribute; its cardinality caps the dimension's cardinality below.
        MeasureGroupAttribute granularity = null;
        RegularMeasureGroupDimension regMgDim = (RegularMeasureGroupDimension)mgDim;
        foreach (MeasureGroupAttribute mgDimAttr in regMgDim.Attributes)
        {
            if (mgDimAttr.Type == MeasureGroupAttributeType.Granularity)
            {
                iDimGranularityCardinality = mgDimAttr.Attribute.EstimatedCount;
                granularity = mgDimAttr;
                break;
            }
        }

        aggDim = agg.Dimensions.Find(mgDim.CubeDimensionID);
        if (aggDim == null || granularity == null || aggDim.Attributes.Find(granularity.AttributeID) == null)
            bAggContainsAllGranularityAttributes = false;

        if (aggDim != null)
        {
            foreach (CubeAttribute cubeAttr in mgDim.CubeDimension.Attributes)
            {
                aggAttr = aggDim.Attributes.Find(cubeAttr.AttributeID);
                if (aggAttr != null)
                {
                    if (!CanReachAttributeFromChildInAgg(aggAttr, mgDim.Dimension.KeyAttribute, false)) //redundant attributes don't increase the cardinality of the attribute
                    {
                        dblDimAggCardinality *= (cubeAttr.Attribute.EstimatedCount == 0 ? 1 : cubeAttr.Attribute.EstimatedCount);
                    }
                    bDimAggCardinalityFound = true;
                    iNumSurrogateKeysInAgg++; //apparently every key, even redundant keys, get stored in the agg
                }
            }
        }

        if (dblDimAggCardinality > iDimGranularityCardinality)
        {
            //shouldn't be more than granularity cardinality because auto-exists prevents that
            dblDimAggCardinality = (iDimGranularityCardinality == 0 ? 1 : iDimGranularityCardinality);
        }
        if (bDimAggCardinalityFound)
        {
            dblAggCardinality *= dblDimAggCardinality;
            // NOTE(review): the condition compares against the cumulative dblAggCardinality
            // but assigns the per-dimension dblDimAggCardinality — confirm this mismatch is intentional.
            if (lngMaxAggDimensionCardinality < dblAggCardinality)
                lngMaxAggDimensionCardinality = (long)dblDimAggCardinality;
        }
    }

    // Convert the estimated row counts into a size ratio versus the fact table.
    if (mg1.EstimatedRows != 0 && dblAggCardinality != 0)
    {
        // Sum the per-row storage bytes of every measure in the measure group.
        long iMeasureBytes = 0;
        foreach (Microsoft.AnalysisServices.Measure m in mg1.Measures)
        {
            if (m.DataType == MeasureDataType.Inherited)
            {
                if (m.Source.DataSize > 0)
                    iMeasureBytes += m.Source.DataSize;
                else if (m.Source.DataType == System.Data.OleDb.OleDbType.Integer)
                    iMeasureBytes += 4;
                else if (m.Source.DataType == System.Data.OleDb.OleDbType.SmallInt)
                    iMeasureBytes += 2;
                else if (m.Source.DataType == System.Data.OleDb.OleDbType.TinyInt)
                    iMeasureBytes += 1;
                else
                    iMeasureBytes += 8;
            }
            else
            {
                if (m.DataType == MeasureDataType.Integer)
                    iMeasureBytes += 4;
                else if (m.DataType == MeasureDataType.SmallInt)
                    iMeasureBytes += 2;
                else if (m.DataType == MeasureDataType.TinyInt)
                    iMeasureBytes += 1;
                else
                    iMeasureBytes += 8;
            }
        }

        //the size of each row is 4 bytes for each surrogate key plus the size of measures
        long lngFactTableRowSize = (iMeasureGroupDimensionsCount * 4 + iMeasureBytes);
        long lngAggRowSize = (iNumSurrogateKeysInAgg * 4 + iMeasureBytes);
        if (dblAggCardinality > mg1.EstimatedRows) //this is not possible in the data
        {
            dblAggCardinality = mg1.EstimatedRows;
        }
        //multiply the estimated rows by the size of each row
        size = ((double)(dblAggCardinality * lngAggRowSize)) / ((double)(mg1.EstimatedRows * lngFactTableRowSize));
        //purposefully don't prevent size from being over 1 because an agg can be larger than the fact table if it has more dimension attribute keys than the fact table
        if (lngMaxAggDimensionCardinality > mg1.EstimatedRows) //this is not possible in the data
        {
            lngMaxAggDimensionCardinality = mg1.EstimatedRows;
        }
        //calculate the min size (best case scenario when there is lots of sparsity in fact table) so you can present a range to the user and give the user an idea of the uncertainty
        minSize = ((double)(lngMaxAggDimensionCardinality * lngAggRowSize)) / ((double)(mg1.EstimatedRows * lngFactTableRowSize));
    }

    EstimatedAggSize ret = new EstimatedAggSize();
    ret.minSize = minSize;
    ret.size = size;
    ret.bAggContainsAllGranularityAttributes = bAggContainsAllGranularityAttributes;
    ret.agg = agg;
    return ret;
}
public Agg(Aggregation agg, string sCorrectAggregationDesignName, string sReportTitle)
{
    mAggName = agg.Name;
    mAggDesignName = sCorrectAggregationDesignName;
    mMeasureGroupName = agg.ParentMeasureGroup.Name;
    mDatabaseName = agg.ParentDatabase.Name;

    // Prefer an explicit report title; otherwise show the cube name.
    mCubeNameOrReportTitle = sReportTitle ?? agg.ParentCube.Name;

    // Collect "[Dimension].[Attribute]" entries, one per line.
    mAttributes = string.Empty;
    foreach (AggregationDimension ad in agg.Dimensions)
    {
        foreach (AggregationAttribute aa in ad.Attributes)
        {
            if (mAttributes.Length > 0)
            {
                mAttributes += "\r\n";
            }
            mAttributes += "[" + ad.CubeDimension.Name + "].[" + aa.Attribute.Name + "]";
        }
    }
}
/// <summary>
/// Builds the comparison report for the aggregation identified in the caller's token:
/// one SAL row per assessment plus the combined document library across all assessments.
/// </summary>
public AggregationReportData GetCompareReport()
{
    AggregationReportData response = new AggregationReportData();
    response.SalList = new List <BasicReportData.OverallSALTable>();
    response.DocumentLibraryTable = new List <DocumentLibraryTable>();

    // The aggregation id is read from the token payload ("aggreg" claim).
    TokenManager tm = new TokenManager();
    var aggregationID = tm.PayloadInt("aggreg");
    if (aggregationID == null)
    {
        // No aggregation selected — return the empty skeleton rather than failing.
        return(response);
    }

    AggregationManager agManager = new BusinessLogic.AggregationManager();
    var assessmentList = agManager.GetAssessmentsForAggregation((int)aggregationID);
    Aggregation ag = agManager.GetAggregation((int)aggregationID);
    response.AggregationName = assessmentList.Aggregation.AggregationName;
    response.Information = new AggInformation()
    {
        Assessment_Name = ag.AggregationName,
        Assessment_Date = ag.AggregationDate,
        Assessor_Name = ag.AssessorName
    };

    foreach (var a in assessmentList.Assessments)
    {
        ReportsDataManager reportsDataManager = new ReportsDataManager(a.AssessmentId);

        // Incorporate SAL values into response
        var salTable = reportsDataManager.GetSals();
        var entry = new BasicReportData.OverallSALTable();
        response.SalList.Add(entry);
        entry.Alias = a.Alias;
        entry.OSV = salTable.OSV;
        entry.Q_CV = "";
        entry.Q_IV = "";
        entry.Q_AV = "";
        entry.LastSalDeterminationType = salTable.LastSalDeterminationType;
        // CIA values are only populated for non-GENERAL SAL determinations.
        if (salTable.LastSalDeterminationType != "GENERAL")
        {
            entry.Q_CV = salTable.Q_CV;
            entry.Q_IV = salTable.Q_IV;
            entry.Q_AV = salTable.Q_AV;
        }

        // Document Library: tag each document with the assessment alias it came from.
        var documentLibraryTable = reportsDataManager.GetDocumentLibrary();
        foreach (var docEntry in documentLibraryTable)
        {
            docEntry.Alias = a.Alias;
            response.DocumentLibraryTable.Add(docEntry);
        }
    }
    return(response);
}
// Walkthrough of Deedle series features. The [tag]/[/tag] comment pairs below appear
// to delimit snippets for documentation extraction — keep them intact when editing.
public static void Samples([CallerFilePath] string file = "")
{
    var root = Path.GetDirectoryName(file);

    // ------------------------------------------------------------
    // Creating time series
    // ------------------------------------------------------------

    // [create-builder]
    var numNames = new SeriesBuilder <int, string>() { { 1, "one" }, { 2, "two" }, { 3, "three" } }.Series;
    numNames.Print();
    // [/create-builder]

    // [create-heterogen]
    // Create series builder and use it via 'dynamic'
    var nameNumsBuild = new SeriesBuilder <string, int>();
    dynamic nameNumsDyn = nameNumsBuild;
    nameNumsDyn.One = 1;
    nameNumsDyn.Two = 2;
    nameNumsDyn.Three = 3;
    // Build series and print it
    var nameNums = nameNumsBuild.Series;
    nameNums.Print();
    // [/create-heterogen]

    // [create-ordinal]
    var rnd = new Random();
    var randNums = Enumerable.Range(0, 100).Select(_ => rnd.NextDouble()).ToOrdinalSeries();
    randNums.Print();
    // [/create-ordinal]

    // [create-kvp]
    var sin = Enumerable.Range(0, 1000)
        .Select(x => KeyValue.Create(x, Math.Sin(x / 100.0)))
        .ToSeries();
    sin.Print();
    // [/create-kvp]

    // [create-sparse]
    var opts = Enumerable.Range(0, 10)
        .Select(x => KeyValue.Create(x, OptionalValue.OfNullable <int>(x)))
        .ToSparseSeries();
    opts.Print();
    // [/create-sparse]

    // [create-csv]
    var frame = Frame.ReadCsv(Path.Combine(root, "../data/stocks/msft.csv"));
    var frameDate = frame.IndexRows <DateTime>("Date").SortRowsByKey();
    var msftOpen = frameDate.GetColumn <double>("Open");
    msftOpen.Print();
    // [/create-csv]

    // ------------------------------------------------------------
    // Lookup and slicing
    // ------------------------------------------------------------

    // [lookup-key]
    // Get value for a specified int and string key
    var tenth = randNums[10];
    var one = nameNums["One"];
    // Get first and last value using index
    var fst = nameNums.GetAt(0);
    var lst = nameNums.GetAt(nameNums.KeyCount - 1);
    // [/lookup-key]

    // [lookup-opt]
    // Get value as OptionalValue<T> and use it
    var opt = opts.TryGet(5);
    if (opt.HasValue)
    {
        Console.Write(opt.Value);
    }
    // For value types, we can convert to nullable type
    int? value1 = opts.TryGet(5).AsNullable();
    int? value2 = opts.TryGetAt(0).AsNullable();
    // [/lookup-opt]

    // [lookup-ord]
    // Get value exactly at the specified key
    var jan3 = msftOpen.Get(new DateTime(2012, 1, 3));
    // Get value at a key or for the nearest previous date
    var beforeJan1 = msftOpen.Get(new DateTime(2012, 1, 1), Lookup.ExactOrSmaller);
    // Get value at a key or for the nearest later date
    var afterJan1 = msftOpen.Get(new DateTime(2012, 1, 1), Lookup.ExactOrGreater);
    // [/lookup-ord]

    // [lookup-slice]
    // Get a series starting/ending at
    // the specified date (inclusive)
    var msftStartIncl = msftOpen.StartAt(new DateTime(2012, 1, 1));
    var msftEndIncl = msftOpen.EndAt(new DateTime(2012, 12, 31));
    // Get a series starting/ending after/before
    // the specified date (exclusive)
    var msftStartExcl = msftOpen.After(new DateTime(2012, 1, 1));
    var msftEndExcl = msftOpen.Before(new DateTime(2012, 12, 31));
    // Get prices for 2012 (both keys are inclusive)
    var msft2012 = msftOpen.Between(new DateTime(2012, 1, 1), new DateTime(2012, 12, 31));
    // [/lookup-slice]

    // ------------------------------------------------------------
    // Statistics and calculations
    // ------------------------------------------------------------

    // [calc-stat]
    // Calculate median & mean price
    var msftMed = msft2012.Median();
    var msftAvg = msft2012.Mean();
    // Calculate sum of square differences
    var msftDiff = msft2012 - msftAvg;
    var msftSq = (msftDiff * msftDiff).Sum();
    // [/calc-stat]

    // [calc-diff]
    // Subtract previous day value from current day
    var msftChange = msft2012 - msft2012.Shift(1);
    // Use built-in Diff method to do the same
    var msftChangeAlt = msft2012.Diff(1);
    // Get biggest loss and biggest gain
    var minMsChange = msftChange.Min();
    var maxMsChange = msftChange.Max();
    // [/calc-diff]

    // [calc-custom]
    var wackyStat = msft2012.Observations.Select(kvp => kvp.Value / (kvp.Key - msft2012.FirstKey()).TotalDays).Sum();
    // [/calc-custom]

    // ------------------------------------------------------------
    // Missing data
    // ------------------------------------------------------------

    // [fill-const-drop]
    // Fill missing data with constant
    var fillConst = opts.FillMissing(-1);
    fillConst.Print();
    // Drop keys with no value from the series
    var drop = opts.DropMissing();
    drop.Print();
    // [/fill-const-drop]

    // [fill-dir]
    // Fill with previous available value
    var fillFwd = opts.FillMissing(Direction.Forward);
    fillFwd.Print();
    // Fill with the next available value
    var fillBwd = opts.FillMissing(Direction.Backward);
    fillBwd.Print();
    // [/fill-dir]

    // ------------------------------------------------------------
    // Windows and chunks, grouping
    // ------------------------------------------------------------

    // [aggreg-group]
    // Group random numbers by the first digit & get distribution
    var buckets = randNums
        .GroupBy(kvp => (int)(kvp.Value * 10))
        .Select(kvp => OptionalValue.Create(kvp.Value.KeyCount));
    buckets.Print();
    // [/aggreg-group]

    // [aggreg-win]
    // Average over 25 element floating window
    var monthlyWinMean = msft2012.WindowInto(25, win => win.Mean());
    // Get floating window over 5 elements as series of series
    // and then apply average on each series individually
    var weeklyWinMean = msft2012.Window(5).Select(kvp => kvp.Value.Mean());
    // [/aggreg-win]

    // [aggreg-chunk]
    // Get chunks of size 25 and mean each (disjoint) chunk
    var monthlyChunkMean = msft2012.ChunkInto(25, win => win.Mean());
    // Get series containing individual chunks (as series)
    var weeklyChunkMean = msft2012.Chunk(5).Select(kvp => kvp.Value.Mean());
    // [/aggreg-chunk]

    // [aggreg-pair]
    // For each key, get the previous value and average them
    var twoDayAvgs = msft2012.Pairwise().Select(kvp => (kvp.Value.Item1 + kvp.Value.Item2) / 2.0);
    // [/aggreg-pair]

    // [aggreg-any]
    msft2012.Aggregate(
        // Get chunks while the month & year of the keys are the same
        Aggregation.ChunkWhile <DateTime>((k1, k2) => k1.Month == k2.Month && k2.Year == k1.Year),
        // For each chunk, return the first key as the key and
        // either average value or missing value if it was empty
        chunk => KeyValue.Create(chunk.Data.FirstKey(), chunk.Data.ValueCount > 0 ? OptionalValue.Create(chunk.Data.Mean()) : OptionalValue.Empty <double>()));
    // [/aggreg-any]

    // ------------------------------------------------------------
    // Operations (Select, where)
    // ------------------------------------------------------------

    // [linq-methods]
    var overMean = msft2012
        .Select(kvp => kvp.Value - msftAvg)
        .Where(kvp => kvp.Value > 0.0).KeyCount;
    // [/linq-methods]

    // [linq-query]
    var underMean = (from kvp in msft2012 where kvp.Value - msftAvg < 0.0 select kvp).KeyCount;
    // [/linq-query]

    Console.WriteLine(overMean);
    Console.WriteLine(underMean);

    // ------------------------------------------------------------
    // Indexing and sampling & resampling
    // ------------------------------------------------------------

    // [index-keys]
    // Turn DateTime keys into DateTimeOffset keys
    var byOffs = msft2012.SelectKeys(kvp => new DateTimeOffset(kvp.Key));
    // Replace keys with ordinal numbers 0 .. KeyCount-1
    var byInt = msft2012.IndexOrdinally();
    // [/index-keys]

    // [index-with]
    // Replace keys with explictly specified new keys
    var byDays = numNames.IndexWith(new[] { DateTime.Today, DateTime.Today.AddDays(1.0), DateTime.Today.AddDays(2.0) });
    // [/index-with]
}
/// <summary>
/// Searches the product index by brand and/or screen size and returns the matching
/// documents together with the "screenSize.keyword" term-aggregation buckets.
/// </summary>
/// <param name="productSearchInput">Search criteria; at least Brand or ScreenSize must be set.</param>
/// <exception cref="InvalidArgumentException">Thrown when neither Brand nor ScreenSize is provided.</exception>
public async Task <ProductOutput> GetSearchAsync(ProductSearchInput productSearchInput)
{
    if (string.IsNullOrEmpty(productSearchInput.Brand) && string.IsNullOrEmpty(productSearchInput.ScreenSize))
    {
        throw new InvalidArgumentException();
    }

    // BUG FIX: the original wrapped everything in catch (Exception) and returned
    // 'await Task.FromException(ex)', which just rethrows inside the catch with an
    // obfuscated stack — the async method's Task faults with the same exception either
    // way, so the try/catch was removed.
    var indexName = ElasticSearchItemsConst.ProductIndexName;
    var searchQuery = new Nest.SearchDescriptor <ProductElasticIndexDto>();
    string term = "";

    if (productSearchInput.ScreenSize != null)
    {
        // Exact (verbatim) term match on screen size, with bucket counts per distinct size.
        term = "screensizes";
        searchQuery
            .Query(x => x
                .Terms(c => c
                    .Verbatim()
                    .Field(p => p.ScreenSize)
                    .Terms(productSearchInput.ScreenSize.ToLower())))
            .Aggregations(a => a
                .Terms(term, t => t
                    .Field("screenSize.keyword")
                    .MinimumDocumentCount(1)));
    }

    if (!String.IsNullOrEmpty(productSearchInput.Brand))
    {
        // Brand match over the searching area using the ngram analyzer.
        // NOTE(review): when both filters are set, this second Query/Aggregations call
        // appears to replace the screen-size query above (original behavior, preserved) —
        // confirm whether a combined bool query was intended.
        term = "searchingArea";
        searchQuery
            .Query(x => x.Match(m => m.Field(f => f.SearchingArea)
                .Query(productSearchInput.Brand.ToLower())
                .Analyzer("ngram_analyzer")))
            .Aggregations(a => a
                .Terms(term, t => t
                    .Field("screenSize.keyword")
                    .MinimumDocumentCount(1)));
    }

    var searchResultData = await _elasticSearchService.SimpleSearchAsync <ProductElasticIndexDto, int>(indexName, searchQuery);

    // Project the aggregation buckets into the output DTO list.
    var aggregationResponse = searchResultData.Aggregations.Terms(term);
    var aggregationList = new List <Aggregation>();
    foreach (var item in aggregationResponse.Buckets)
    {
        aggregationList.Add(new Aggregation
        {
            Key = item.Key,
            DocCount = item.DocCount,
        });
    }

    // Copy the matched documents into plain DTOs.
    var productElasticIndexList = searchResultData.Documents
        .Select(opt => new ProductElasticIndexDto
        {
            SearchingArea = opt.SearchingArea,
            Id = opt.Id,
            Brand = opt.Brand,
            ModelName = opt.ModelName,
            ScreenSize = opt.ScreenSize,
            Price = opt.Price,
            Stock = opt.Stock,
        });

    var output = new ProductOutput();
    output.AggregationList = aggregationList;
    output.ProductElasticIndexDtoList = productElasticIndexList.ToList();
    return output;
}
public SimilarAgg(Aggregation agg, Aggregation similaragg, string sCorrectAggregationDesignName, string sReportTitle, Boolean bCountMembers)
{
    // Primary aggregation name, annotated with its estimated size range when known.
    mAggName = agg.Name;
    string sAggEstimate = AggManager.EditAggs.GetEstimatedAggSizeRange(AggManager.EditAggs.GetEstimatedSize(agg));
    if (sAggEstimate != null)
    {
        mAggName += " (" + sAggEstimate + ")";
    }

    mAggDesignName = sCorrectAggregationDesignName;
    mMeasureGroupName = agg.ParentMeasureGroup.Name;
    mDatabaseName = agg.ParentDatabase.Name;
    mCubeNameOrReportTitle = (sReportTitle ?? agg.ParentCube.Name)
        + (bCountMembers ? " (Counting attribute members) " : "");

    if (similaragg == null)
    {
        mSimilarAggName = "";
    }
    else
    {
        // Similar aggregation name, annotated the same way.
        mSimilarAggName = similaragg.Name;
        sAggEstimate = AggManager.EditAggs.GetEstimatedAggSizeRange(AggManager.EditAggs.GetEstimatedSize(similaragg));
        if (sAggEstimate != null)
        {
            mSimilarAggName += " (" + sAggEstimate + ")";
        }
    }
}
// Returns true when, for every dimension, each of agg1's (non-redundant) attributes is
// equal to or reachable from one of agg2's attributes — i.e. agg2 can answer agg1's queries.
internal static Boolean IsAggregationIncluded(Aggregation agg1, Aggregation agg2, Boolean bCountMembers)
{
    Boolean bIsAttribute1Included = false;
    foreach (MeasureGroupDimension mgDim in agg1.ParentMeasureGroup.Dimensions)
    {
        AggregationDimension dim1 = agg1.Dimensions.Find(mgDim.CubeDimensionID);
        AggregationDimension dim2 = agg2.Dimensions.Find(mgDim.CubeDimensionID);

        if ((dim1 == null || dim1.Attributes.Count == 0) && (dim2 == null || dim2.Attributes.Count == 0))
            // both at the All level... continue
            continue;

        if ((dim1 != null && dim1.Attributes.Count > 0) && (dim2 == null || dim2.Attributes.Count == 0))
            // dim2 aggregates at All level so it's not possible it being more granular than dim1,
            // then agg2 cannot contain agg1
            return false;

        if ((dim1 == null || dim1.Attributes.Count == 0) && (dim2 != null && dim2.Attributes.Count > 0))
            // dim1 aggregates at All level so it's probable that all aggregation being included in dim2,
            // but still have to evaluate the rest of attributes
            continue;

        if ((dim1 != null && dim1.Attributes.Count > 0) && (dim2 != null && dim2.Attributes.Count > 0))
        // both dim1 and dim2 have aggregations at lower level than All, so they need to be evaluated
        {
            // For both Dim1 and Dim2 attributes, purge those attributes that are redundant
            AggregationDimension dim1Purged = RemoveRedundantAttributes(dim1);
            AggregationDimension dim2Purged = RemoveRedundantAttributes(dim2);
            foreach (AggregationAttribute att1 in dim1Purged.Attributes)
            {
                // Direct match: att1 is present in dim2 as well.
                if (dim2Purged.Attributes.Contains(att1.AttributeID))
                    continue;

                Boolean bExistsAttributeInSameTree = false;
                foreach (AggregationAttribute att2 in dim2Purged.Attributes)
                {
                    bIsAttribute1Included = IsRedundantAttribute(agg1.ParentMeasureGroup, dim1.CubeDimensionID, att1.AttributeID, att2.AttributeID, false, -1);
                    //bIsAttribute2Included = IsRedundantAttribute(agg1.ParentMeasureGroup, dim1.CubeDimensionID, att2.AttributeID, att1.AttributeID, false, -1);
                    if (bIsAttribute1Included)
                    // Attribute att1 is included in att2, then if countmembers is turned on
                    // ponderated ratio will be calculated
                    // else go out for another attribute
                    {
                        if (bCountMembers)
                        {
                            if (!IsRedundantAttribute(agg1.ParentMeasureGroup, dim1.CubeDimensionID, att1.AttributeID, att2.AttributeID, true, -1))
                                // If included but member count differ vastly, then report that agg1 is not included in agg2
                                return false;
                        }
                        else
                        {
                            bExistsAttributeInSameTree = true;
                            break;
                        }
                    }
                }
                // NOTE(review): when bCountMembers is true, bExistsAttributeInSameTree is
                // never set, so this path returns false even if every member-count check
                // passed — confirm that is intended.
                if (!bExistsAttributeInSameTree)
                    // if dim1 does not have attributes in same tree as dim2, then agg1 is not included.
                    return false;
            }
        }
    }
    // Finally if all dim1 are equal or included in dim2 then aggregation1 is included in aggregation2
    return true;
}
/// <summary> /// Helper function takes aggregation as input and returns string representation of aggregation /// </summary> private string ConvertAggToSting(Aggregation agg) { string outStr = ""; AggregationAttribute aggAttr; AggregationDimension aggDim; foreach (MeasureGroupDimension mgDim in mg1.Dimensions) { aggDim = agg.Dimensions.Find(mgDim.CubeDimensionID); if (aggDim == null) { foreach (CubeAttribute cubeDimAttr in mgDim.CubeDimension.Attributes) outStr = outStr + "0"; } else { foreach (CubeAttribute cubeDimAttr in mgDim.CubeDimension.Attributes) { aggAttr = aggDim.Attributes.Find(cubeDimAttr.AttributeID); if (aggAttr == null) outStr = outStr + "0"; else outStr = outStr + "1"; } } outStr = outStr + ","; } return outStr.Substring(0, outStr.Length - 1); }
private void SetEstimatedSize(Aggregation agg) { double size = 0; double minSize = 0; bool bAggContainsAllGranularityAttributes = true; try { EstimatedAggSize oEstSize = GetEstimatedSize(agg, mg1); size = oEstSize.size; minSize = oEstSize.minSize; bAggContainsAllGranularityAttributes = oEstSize.bAggContainsAllGranularityAttributes; } catch { } finally { if (size != 0) { if (minSize != 0 && minSize < size && !bAggContainsAllGranularityAttributes) { lblEstimatedSize.Text = agg.Name + " Estimated Size: " + (minSize * 100).ToString("#0.00") + "% to " + (size * 100).ToString("#0.00") + "% of fact data"; lblEstimatedSize.ForeColor = (size > .3333 ? Color.Red : Color.Black); } else { lblEstimatedSize.Text = agg.Name + " Estimated Size: " + (size * 100).ToString("#0.00") + "% of fact data"; lblEstimatedSize.ForeColor = (size > .3333 ? Color.Red : Color.Black); } } else { lblEstimatedSize.Text = agg.Name + " Estimated Size: Unknown (please update EstimatedRows on measure group)"; lblEstimatedSize.ForeColor = Color.Black; } } }
private Aggregation GetAggregationFromString(string aggregationName, string instr) { Aggregation agg = new Aggregation(); agg.Name = aggregationName; string a1; int dimNum = 0; int attrNum = 0; bool newDim = true; for (int i = 0; i < instr.Length; i++) { a1 = instr[i].ToString(); switch (a1) { case ",": dimNum++; attrNum = -1; newDim = true; break; case "0": break; case "1": if (newDim) { agg.Dimensions.Add(dimIDs[dimNum]); newDim = false; } agg.Dimensions[dimIDs[dimNum]].Attributes.Add(dimAttributes[dimNum, attrNum]); break; default: break; } attrNum++; } return agg; }
protected abstract void AddHavingSpecification <TEntity>(string alias, string tableName, string tableSchema, string name, Aggregation aggregation, Comparison comparison, object value);
public MissingAggregationPerformance(AggregationPerformance aggP, Aggregation[] missingAggregations, Dictionary<Aggregation, int> aggHits) { _aggP = aggP; _missingAggregation = missingAggregations; _aggHits = aggHits; }
public void Reset() { this.BeginAggregation = null; this.EndAggregation = null; }
public AggregationPerformance(Aggregation a) { _agg = a; }
/// <summary>
/// Computes one aggregated value over a column's boxed cell values.
/// </summary>
/// <param name="vals">The boxed values of a single column (or group) to aggregate.</param>
/// <param name="aggregation">The aggregation operation to apply.</param>
/// <param name="colType">The column type. Numeric aggregations are only defined for
/// I32 (int), I64 (long), F32 (float) and DD (double); Min/Max additionally support
/// DT (DateTime). Every other type/aggregation combination yields <c>DataFrame.NAN</c>.</param>
/// <returns>The aggregated value; <c>null</c> for <c>Aggregation.None</c>;
/// <c>DataFrame.NAN</c> when the aggregation is undefined for the column type.</returns>
/// <exception cref="Exception">Thrown when <paramref name="aggregation"/> is not a known value.</exception>
internal static object _calculateAggregation(IEnumerable <object> vals, Aggregation aggregation, ColType colType)
{
    // Typed views over the boxed values. These are deferred (LINQ), so each case still
    // enumerates vals exactly once — the same as the original per-case Select calls.
    IEnumerable<int> AsInt() => vals.Select(x => Convert.ToInt32(x, CultureInfo.InvariantCulture));
    IEnumerable<long> AsLong() => vals.Select(x => Convert.ToInt64(x, CultureInfo.InvariantCulture));
    IEnumerable<float> AsFloat() => vals.Select(x => Convert.ToSingle(x, CultureInfo.InvariantCulture));
    IEnumerable<double> AsDouble() => vals.Select(x => Convert.ToDouble(x, CultureInfo.InvariantCulture));
    IEnumerable<DateTime> AsDate() => vals.Select(x => Convert.ToDateTime(x, CultureInfo.InvariantCulture));

    switch (aggregation)
    {
        case Aggregation.None:
            return null;
        case Aggregation.Unique:
            return vals.Distinct().Count();
        case Aggregation.Top:
            // Most frequent value.
            return vals.ToArray().FrequencyOf().First().Item1;
        case Aggregation.Random:
        {
            var ind = Constant.rand.Next(vals.Count());
            return vals.ToArray().ElementAt(ind);
        }
        case Aggregation.Frequency:
            // Occurrence count of the most frequent value.
            return vals.ToArray().FrequencyOf().First().Item2;
        case Aggregation.First:
            return vals.First();
        case Aggregation.Last:
            return vals.Last();
        case Aggregation.Count:
            return vals.Count();
        case Aggregation.Sum:
            switch (colType)
            {
                case ColType.I32: return AsInt().Sum();
                case ColType.I64: return AsLong().Sum();
                case ColType.F32: return AsFloat().Sum();
                case ColType.DD: return AsDouble().Sum();
                default: return DataFrame.NAN; // bool/datetime/categorical/string: sum undefined
            }
        case Aggregation.Avg:
            switch (colType)
            {
                case ColType.I32: return AsInt().Average();
                case ColType.I64: return AsLong().Average();
                case ColType.F32: return AsFloat().Average();
                case ColType.DD: return AsDouble().Average();
                default: return DataFrame.NAN;
            }
        case Aggregation.Min:
            switch (colType)
            {
                case ColType.I32: return AsInt().Min();
                case ColType.I64: return AsLong().Min();
                case ColType.F32: return AsFloat().Min();
                case ColType.DD: return AsDouble().Min();
                case ColType.DT: return AsDate().Min(); // ordering is defined for dates too
                default: return DataFrame.NAN;
            }
        case Aggregation.Max:
            switch (colType)
            {
                case ColType.I32: return AsInt().Max();
                case ColType.I64: return AsLong().Max();
                case ColType.F32: return AsFloat().Max();
                case ColType.DD: return AsDouble().Max();
                case ColType.DT: return AsDate().Max();
                default: return DataFrame.NAN;
            }
        // Standard deviation
        case Aggregation.Std:
            switch (colType)
            {
                case ColType.I32: return AsInt().ToArray().Stdev();
                case ColType.I64: return AsLong().ToArray().Stdev();
                case ColType.F32: return AsFloat().ToArray().Stdev();
                case ColType.DD: return AsDouble().ToArray().Stdev();
                default: return DataFrame.NAN;
            }
        case Aggregation.Mode:
            return vals.ToArray().ModeOf <object>();
        case Aggregation.Median:
            switch (colType)
            {
                case ColType.I32: return AsInt().ToArray().MedianOf();
                case ColType.I64: return AsLong().ToArray().MedianOf();
                case ColType.F32: return AsFloat().ToArray().MedianOf();
                case ColType.DD: return AsDouble().ToArray().MedianOf();
                default: return DataFrame.NAN;
            }
        //25% percentage quantile
        case Aggregation.FirstQuartile:
            switch (colType)
            {
                case ColType.I32: return AsInt().ToArray().Percentile(25);
                case ColType.I64: return AsLong().ToArray().Percentile(25);
                case ColType.F32: return AsFloat().ToArray().Percentile(25);
                case ColType.DD: return AsDouble().ToArray().Percentile(25);
                default: return DataFrame.NAN;
            }
        //75% percentage quantile
        case Aggregation.ThirdQuartile:
            switch (colType)
            {
                case ColType.I32: return AsInt().ToArray().Percentile(75);
                case ColType.I64: return AsLong().ToArray().Percentile(75);
                case ColType.F32: return AsFloat().ToArray().Percentile(75);
                case ColType.DD: return AsDouble().ToArray().Percentile(75);
                default: return DataFrame.NAN;
            }
        default:
            // BUGFIX: the old message said "DataType is not known." even though this branch
            // fires for an unrecognized aggregation, not an unrecognized column type.
            throw new Exception("Aggregation is not known.");
    }
}
public AggValidationWarning(Aggregation agg, string sCorrectAggregationDesignName, string sWarning, string sReportTitle) { mAggName = agg.Name; mAggDesignName = sCorrectAggregationDesignName; mMeasureGroupName = agg.ParentMeasureGroup.Name; mCubeNameOrReportTitle = (sReportTitle == null ? agg.ParentCube.Name : sReportTitle); mDatabaseName = agg.ParentDatabase.Name; mWarning = sWarning; }
public void GroupingUsingDynamicObject() { //Arrange MemorySource <ExpandoObject> source = new MemorySource <ExpandoObject>(); dynamic row1 = new ExpandoObject(); row1.ClassName = "Class1"; row1.DetailValue = 3.5; dynamic row2 = new ExpandoObject(); row2.ClassName = "Class1"; row2.DetailValue = 6.5; dynamic row3 = new ExpandoObject(); row3.ClassName = "Class2"; row3.DetailValue = 10; source.Data.Add(row1); source.Data.Add(row2); source.Data.Add(row3); Aggregation <ExpandoObject, ExpandoObject> agg = new Aggregation <ExpandoObject, ExpandoObject>( (row, aggValue) => { dynamic r = row as ExpandoObject; dynamic a = aggValue as ExpandoObject; if (!((IDictionary <String, object>)a).ContainsKey("AggValue")) { a.AggValue = r.DetailValue; } else { a.AggValue += r.DetailValue; } }, row => { dynamic r = row as ExpandoObject; return(r.ClassName); }, (key, agg) => { dynamic a = agg as ExpandoObject; a.GroupName = (string)key; }); MemoryDestination <ExpandoObject> dest = new MemoryDestination <ExpandoObject>(); //Act source.LinkTo(agg); agg.LinkTo(dest); source.Execute(); dest.Wait(); //Assert Assert.Collection <ExpandoObject>(dest.Data, ar => { dynamic a = ar as ExpandoObject; Assert.True(a.AggValue == 10 && a.GroupName == "Class1"); }, ar => { dynamic a = ar as ExpandoObject; Assert.True(a.AggValue == 10 && a.GroupName == "Class2"); } ); }
protected abstract void AddColumnSelection <T>( string name, string showname, string alias = null, Aggregation aggregation = Aggregation.None);
public async Task <ConsumptionOverview> GetConsumption(int servicelocationid, DateTime from, DateTime to, Aggregation aggregation) { HttpClient wc = new HttpClient(); string url = string.Format(resourceurl + "servicelocation/{0}/consumption?aggregation={1}&from={2}&to={3}", servicelocationid, Convert.ToInt32(aggregation), ConvertToUTCTimestamp(from) * 1000, ConvertToUTCTimestamp(to) * 1000 ); Debug.WriteLine("Consumption url= " + url); //Add access _token to header wc.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _token.access_token); var res = await wc.GetStringAsync(url); Debug.WriteLine(res); var result = JsonConvert.DeserializeObject <ConsumptionOverview>(res); return(result); }
/// <summary> /// Returns the estimated size of this aggregation. /// </summary> /// <param name="agg"></param> /// <returns></returns> public static EstimatedAggSize GetEstimatedSize(Aggregation agg) { return GetEstimatedSize(agg, agg.ParentMeasureGroup); }
// NOTE(review): this is decompiled F# (Deedle's LinearIndex builder) — the cast/ordering
// structure is fragile, so the code is left byte-for-byte as-is; comments only.
// Aggregates an ordered index into window/chunk segments and builds a new index/vector pair:
//  - requires index.IsOrdered, otherwise throws InvalidOperationException;
//  - switches on the Aggregation<K> union tag to produce segment ranges:
//    tag 0 = WindowSize, tag 2 = WindowWhile, tag 3 = ChunkWhile, default = ChunkSize;
//  - each (segment kind, sub-index, vector construction) is mapped through `selector`
//    to yield a (new key, new value) pair;
//  - the new keys are materialized into the returned index (Create), and the new values
//    into a vector built with CreateMissing (so values may be optional/missing).
Tuple <IIndex <TNewKey>, IVector <R> > IIndexBuilder.Aggregate <K, TNewKey, R>(IIndex <K> index, Aggregation <K> aggregation, VectorConstruction vector, Func <Tuple <DataSegmentKind, Tuple <IIndex <K>, VectorConstruction> >, Tuple <TNewKey, R> > selector) { if (!index.IsOrdered) { throw new InvalidOperationException("Floating window aggregation and chunking is only supported on ordered indices. Consider sorting the series before calling the operation."); } IIndexBuilder indexBuilder1 = (IIndexBuilder)this; Aggregation <K> aggregation1 = aggregation; IEnumerable <Tuple <DataSegmentKind, long, long> > tuples1; switch (aggregation1.get_Tag()) { case 0: Aggregation <K> .WindowSize windowSize = (Aggregation <K> .WindowSize)aggregation1; tuples1 = Seq.windowRangesWithBounds((long)windowSize.item1, windowSize.item2, index.KeyCount); break; case 2: tuples1 = (IEnumerable <Tuple <DataSegmentKind, long, long> >)SeqModule.Map <Tuple <long, long>, Tuple <DataSegmentKind, long, long> >((Func <M0, M1>) new LinearIndex.locations(), (IEnumerable <M0>)Seq.windowRangesWhile <K>(((Aggregation <K> .WindowWhile)aggregation1).item, (IEnumerable <K>)index.Keys)); break; case 3: tuples1 = (IEnumerable <Tuple <DataSegmentKind, long, long> >)SeqModule.Map <Tuple <long, long>, Tuple <DataSegmentKind, long, long> >((Func <M0, M1>) new LinearIndex.locations(), (IEnumerable <M0>)Seq.chunkRangesWhile <K>(((Aggregation <K> .ChunkWhile)aggregation1).item, (IEnumerable <K>)index.Keys)); break; default: Aggregation <K> .ChunkSize chunkSize = (Aggregation <K> .ChunkSize)aggregation1; tuples1 = Seq.chunkRangesWithBounds((long)chunkSize.item1, chunkSize.item2, index.KeyCount); break; } IEnumerable <Tuple <DataSegmentKind, long, long> > tuples2 = tuples1; IEnumerable <Tuple <DataSegmentKind, Tuple <IIndex <K>, VectorConstruction> > > tuples3 = (IEnumerable <Tuple <DataSegmentKind, Tuple <IIndex <K>, VectorConstruction> > >)SeqModule.Map <Tuple <DataSegmentKind, long, long>, Tuple <DataSegmentKind, Tuple
<IIndex <K>, VectorConstruction> > >((Func <M0, M1>) new LinearIndex.vectorConstructions <K>(index, vector), (IEnumerable <M0>)tuples2); Tuple <TNewKey, R>[] tupleArray1 = (Tuple <TNewKey, R>[])ArrayModule.OfSeq <Tuple <TNewKey, R> >((IEnumerable <M0>)SeqModule.Map <Tuple <DataSegmentKind, Tuple <IIndex <K>, VectorConstruction> >, Tuple <TNewKey, R> >((Func <M0, M1>)selector, (IEnumerable <M0>)tuples3)); IIndexBuilder indexBuilder2 = indexBuilder1; Func <Tuple <TNewKey, R>, TNewKey> Func1 = (Func <Tuple <TNewKey, R>, TNewKey>) new LinearIndex.newIndex <TNewKey, R>(); Tuple <TNewKey, R>[] tupleArray2 = tupleArray1; if ((object)tupleArray2 == null) { throw new ArgumentNullException("array"); } TNewKey[] array = new TNewKey[tupleArray2.Length]; IIndexBuilder indexBuilder3 = indexBuilder2; for (int index1 = 0; index1 < array.Length; ++index1) { array[index1] = Func1.Invoke(tupleArray2[index1]); } IIndex <TNewKey> index2 = indexBuilder3.Create <TNewKey>(System.Array.AsReadOnly <TNewKey>(array), (FSharpOption <bool>)null); IVectorBuilder vectorBuilder1 = this.vectorBuilder; Func <Tuple <TNewKey, R>, R> Func2 = (Func <Tuple <TNewKey, R>, R>) new LinearIndex.vect <TNewKey, R>(); Tuple <TNewKey, R>[] tupleArray3 = tupleArray1; if ((object)tupleArray3 == null) { throw new ArgumentNullException("array"); } R[] optionalValueArray = new R[tupleArray3.Length]; IVectorBuilder vectorBuilder2 = vectorBuilder1; for (int index1 = 0; index1 < optionalValueArray.Length; ++index1) { optionalValueArray[index1] = Func2.Invoke(tupleArray3[index1]); } IVector <R> missing = vectorBuilder2.CreateMissing <R>(optionalValueArray); return(new Tuple <IIndex <TNewKey>, IVector <R> >(index2, missing)); }
public async Task<ConsumptionOverview> GetConsumption(int servicelocationid, DateTime from, DateTime to, Aggregation aggregation) { HttpClient wc = new HttpClient(); string url = string.Format(resourceurl + "servicelocation/{0}/consumption?aggregation={1}&from={2}&to={3}", servicelocationid, Convert.ToInt32(aggregation), ConvertToUTCTimestamp(from) * 1000, ConvertToUTCTimestamp(to) * 1000 ); Debug.WriteLine("Consumption url= " + url); //Add access _token to header wc.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer" , _token.access_token); var res = await wc.GetStringAsync(url); Debug.WriteLine(res); var result = JsonConvert.DeserializeObject<ConsumptionOverview>(res); return result; }