/// <summary>
/// Projects each per-key quotient of <paramref name="bag"/> (keys ordered by
/// <paramref name="order"/>) to a <see cref="double"/> via numerator/denominator cast
/// division. Assumes the bag's total is positive (no guard here — see method name).
/// </summary>
/// <typeparam name="TKey">key type of the bag</typeparam>
/// <param name="bag">bag whose per-key counts are converted; presumably counts per key — TODO confirm Bag1 semantics</param>
/// <param name="order">comparer that fixes the order of the emitted values (by key)</param>
/// <returns>one double per key, in key order per <paramref name="order"/></returns>
static public IEnumerable<double> _Dbls_assumeTotalPositive<TKey>(nilnul.obj.Bag1<TKey> bag, IComparer<TKey> order)
{
	// Fix: removed the stray empty statement (doubled semicolon) after the return.
	return _Quotients_assumeTotalPositive(bag, order)
		.Select(q => nilnul.num.quotient.to_._DblX.ByCastNumDen(q));
}
/// <summary>
/// Orders the bag's entries by key with <paramref name="order"/>, then delegates to the
/// sequence overload to yield each entry's quotient over the (assumed positive) total.
/// </summary>
/// <typeparam name="TKey">key type of the bag</typeparam>
/// <param name="bag">bag whose entries are converted to quotients</param>
/// <param name="order">comparer fixing the key order of the result</param>
/// <returns>quotients in key order</returns>
static public IEnumerable<nilnul.num.Quotient1> _Quotients_assumeTotalPositive<TKey>(nilnul.obj.Bag1<TKey> bag, IComparer<TKey> order)
{
	var orderedEntries = bag.OrderBy(entry => entry.Key, order);
	return _Quotients_assumeTotalPositive(orderedEntries);
}
/// <summary>
/// Dispatches to the sequence overload by viewing the bag as its key/count pairs.
/// </summary>
/// <typeparam name="TKey">key type of the bag</typeparam>
/// <param name="bag">bag whose entries are converted to quotients</param>
/// <returns>one quotient per bag entry</returns>
static public IEnumerable<nilnul.num.Quotient1> _Quotients_assumeTotalPositive<TKey>(nilnul.obj.Bag1<TKey> bag)
{
	// The explicit cast selects the IEnumerable<KeyValuePair<...>> overload
	// rather than recursing into this one.
	var pairs = (IEnumerable<KeyValuePair<TKey, nilnul.Num1>>)bag;
	return _Quotients_assumeTotalPositive(pairs);
}
/// <summary>
/// Per-key probability of <paramref name="bag"/> as a vowed double: computes the exact
/// per-key quotients, then wraps each quotient's double value in <c>ProbDbl</c>.
/// Assumes the bag's total is positive.
/// </summary>
/// <typeparam name="TKey">key type of the bag</typeparam>
/// <param name="bag">bag whose per-key probabilities are computed</param>
/// <returns>dictionary keyed with the bag's own equality comparer</returns>
static public Dictionary<TKey, ProbDbl> _ProbInVowedDbl_assumeTotalPositive<TKey>(nilnul.obj.Bag1<TKey> bag)
{
	// ToDictionary with the bag's comparer reproduces the original
	// Dictionary(bag.eq) + Add loop, including the throw on duplicate keys.
	return _ProbInQuotient_assumeTotalPositive(bag)
		.ToDictionary(
			pair => pair.Key,
			pair => new ProbDbl(nilnul.num.quotient.to_._DblX.ByCastNumDen(pair.Value)),
			bag.eq
		);
}
/// <summary>
/// Per-key probability of <paramref name="bag"/> as an exact quotient (count / total).
/// Uses the default order, which is order by hash, thus not explicitly specified.
/// Assumes the total cardinality is positive.
/// </summary>
/// <typeparam name="TKey">key type of the bag</typeparam>
/// <param name="bag">bag whose per-key probabilities are computed</param>
/// <returns>dictionary keyed with the bag's own equality comparer</returns>
static public Dictionary<TKey, nilnul.num.Quotient1> _ProbInQuotient_assumeTotalPositive<TKey>(nilnul.obj.Bag1<TKey> bag)
{
	// Total is read once up front; each entry's count is divided by it.
	var total = bag.cardinality;
	// View the bag as its key/count pairs so LINQ applies; ToDictionary with the
	// bag's comparer matches the original Dictionary(bag.eq) + Add loop exactly
	// (same enumeration order, same throw on duplicate keys).
	var pairs = (IEnumerable<KeyValuePair<TKey, nilnul.Num1>>)bag;
	return pairs.ToDictionary(
		pair => pair.Key,
		pair => nilnul.num.Quotient1.CreateByDivide(pair.Value, total),
		bag.eq
	);
}
/// <summary>
/// Apriori-style association-rule mining over <paramref name="observations"/>:
/// grows frequent itemsets level by level against an absolute support threshold
/// (<c>Count * _support</c>), then emits (antecedent =&gt; consequent) rules scored by
/// confidence and filtered by <c>this._confidence</c>.
/// </summary>
/// <param name="observations">transactions; each observation enumerates its item strings</param>
/// <returns>association rules paired with their confidence, confidence &gt;= <c>_confidence</c></returns>
public IEnumerable <(nilnul.data.mining._associater.Association <string>, double)> getRules( IEnumerable <Observation> observations ) {
	// NOTE(review): `observations` is enumerated many times below (Count, SelectMany,
	// one pass per level, one pass per candidate rule for confidence) — if the sequence
	// is lazy or expensive, the caller should materialize it first. TODO confirm.

	// _support is treated as a ratio: absolute minimum count = |observations| * _support.
	var minSupport = (observations.Count() * _support);

	// Count every item occurrence across all observations, then keep only items whose
	// count reaches the threshold: these are the items of the frequent 1-itemsets.
	var itemCountS = new nilnul.txt.Bag1( observations.SelectMany(s => s) );
	var supportedItems = new nilnul.txt.Bag1( itemCountS.Where(x => (double)x.Value.en >= minSupport) );

	// Frequent itemsets of the current level, keyed by sequence equality of the item list.
	var frequentItemSetS = new nilnul.obj.Bag1 <IEnumerable <string> >( new NotNull2 <IEqualityComparer <IEnumerable <string> > >( new nilnul.obj.str_.seq.Eq <string>() ) );
	// Seed level 1: each supported item on its own is a frequent itemset.
	supportedItems.Each( component => { frequentItemSetS.add( new[] { component.Key } ); } );

	var itemSetCardinality = 1;
	while (true) {
		// Only items appearing in some current frequent itemset can extend a set.
		var itemsInConsideration = new nilnul.txt.Set(frequentItemSetS.Keys.SelectMany(x => x));
		// Candidates of the next level; the bag's count per key is the candidate's support.
		var newFreqItemSets = new nilnul.obj.Bag1 <IEnumerable <string> >( new NotNull2 <IEqualityComparer <IEnumerable <string> > >( new nilnul.obj.str_.seq.Eq <string>() ) );
		itemSetCardinality++;
		observations.Each( observation => {
			// Restrict the observation to the items still in play, then tally every
			// combination of the new cardinality once per observation that contains it.
			var intersected = nilnul.set.op_.binary_._IntersectX.Intersect( itemsInConsideration , observation );
			var combinated = nilnul.set.family.op_.of_.set_.combinate_._ByIndexsX._Cord_assumeDistinct( intersected, (itemSetCardinality) );
			combinated.Each( combinatedInstance => newFreqItemSets.add( combinatedInstance ) );
		} );
		// Prune candidates whose support is below the threshold.
		newFreqItemSets.removeKeys_ofFinite( newFreqItemSets.Where(x => (double)x.Value.en < minSupport).Select(y => y.Key).ToArray() );
		if (newFreqItemSets.None()) {
			// The algorithm terminates when the frequent itemsets cannot be extended further.
			break;
		} else {
			// NOTE(review): this REPLACES the previous level, so rule extraction below only
			// sees the final (largest) frequent itemsets, not the union of all levels as in
			// textbook Apriori — confirm this is intended.
			frequentItemSetS = newFreqItemSets;
		}
	}

	var rules = new List <(nilnul.data.mining._associater.Association <string>, double)>();
	// Now we have the frequent itemsets; extract candidate rules from each set.
	foreach (var frequentSet in frequentItemSetS) {
		// i ranges over antecedent sizes 1 .. Count-1 (the /*0*/ and /*=*/ markers record
		// the rejected bounds), so both the antecedent and its complement are non-empty.
		for (int i = 1 /*0*/; i < /*=*/ frequentSet.Key.Count(); i++) {
			foreach ( var combinated in nilnul.set.family.op_.of_.set_.combinate_._ByIndexsX._Cord_assumeDistinct( frequentSet.Key , i ) ) {
				var complement = frequentSet.Key.Except(combinated) ;
				// Rule candidate: combinated => complement, scored by confidence over all observations.
				rules.Add( ( new mining._associater.Association <string>( combinated , complement ) , nilnul.stat.dist_.finite_.multivar_.binary.observation.str._ConfidenceX.Confidence( observations.Select(s => new HashSet <string>(s)) , combinated, complement ) ) );
			}
		}
	}
	// Keep only rules whose confidence reaches the configured threshold.
	return(rules.Where(x => x.Item2 >= this._confidence));
}