/// <summary>
/// Demo driver: runs k-means clustering twice — once seeded with the Forgy
/// method, once with Random Partition — printing the per-centre assignment
/// counts before and after convergence, and dumping results to text files.
/// </summary>
static void Main(string[] args)
{
    //// FORGY -------------------------------------------
    Console.WriteLine("FORGY:");
    Console.WriteLine("");

    var forgy = new Forgy(30);          // 30 cluster centres
    var forgyPasses = 0;                // correction iterations performed

    // Load observations, pick random initial centres, do the first assignment.
    forgy.TupleToObjects();
    forgy.RandCenters();
    forgy.AssignToCenter();

    Console.WriteLine("Poczatkowy przydział: ");
    for (var c = 0; c < forgy.centerList.Count; c++)
    {
        Console.WriteLine("Do centrum " + c + " przypisano " + forgy.centerList.ElementAt(c).listOfObsPoint.Count + " punktow obserwacji.");
    }
    Console.WriteLine("");

    // Iterate centre-correction / reassignment until the stop criterion holds.
    do
    {
        forgy.CentreCoordinatesCorrection();
        forgy.AssignToCenter();
        forgy.MSECalculation();
        forgyPasses++;
    }
    while (!forgy.CheckTheDiff());

    Console.WriteLine("Po " + forgyPasses + " korektach koordynatow centrów: ");
    for (var c = 0; c < forgy.centerList.Count; c++)
    {
        Console.WriteLine("Do centrum " + c + " przypisano " + forgy.centerList.ElementAt(c).listOfObsPoint.Count + " punktow obserwacji.");
    }
    Console.WriteLine("");

    // Persist Forgy results.
    forgy.fileOp.SaveCenterList(forgy.centerList, "Centers_F_DF.txt");
    forgy.fileOp.SaveObservableList(forgy.observableList, "Observable_F_DF.txt");
    forgy.fileOp.SaveMSEList(forgy.mseValues, "MSE_F.txt");

    //// RANDOM PARTITION --------------------------------
    Console.WriteLine("RANDOM PARTITION:");
    Console.WriteLine("");

    var partition = new RandomPartition(30);    // 30 cluster centres
    var partitionPasses = 0;                    // correction iterations performed

    // Random-partition seeding: assign points randomly, then derive centres.
    partition.TupleToObjects();
    partition.RandomAssignToCenters();
    partition.CentreCoordinatesCorrection();
    partition.AssignToCenter();

    Console.WriteLine("Poczatkowy przydział: ");
    for (var c = 0; c < partition.centerList.Count; c++)
    {
        Console.WriteLine("Do centrum " + c + " przypisano " + partition.centerList.ElementAt(c).listOfObsPoint.Count + " punktow obserwacji.");
    }
    Console.WriteLine("");

    do
    {
        partition.CentreCoordinatesCorrection();
        partition.AssignToCenter();
        partition.MSECalculation();
        partitionPasses++;
    }
    while (!partition.CheckTheDiff());

    Console.WriteLine("Po " + partitionPasses + " korektach koordynatow centrów: ");
    for (var c = 0; c < partition.centerList.Count; c++)
    {
        Console.WriteLine("Do centrum " + c + " przypisano " + partition.centerList.ElementAt(c).listOfObsPoint.Count + " punktow obserwacji.");
    }
    Console.WriteLine("");

    // Persist Random Partition results.
    partition.fileOp.SaveCenterList(partition.centerList, "Centers_RP_DF.txt");
    partition.fileOp.SaveObservableList(partition.observableList, "Observable_RP_DF.txt");
    partition.fileOp.SaveMSEList(partition.mseValues, "MSE_RP.txt");
}
/// <summary>
/// Prepares the clustering run: records the number of cluster centres and
/// constructs a fresh <c>RandomPartition</c> sized to it.
/// </summary>
/// <param name="centers">
/// Number of cluster centres to use. Defaults to 6, which was the previous
/// hard-coded value, so existing callers are unaffected.
/// </param>
public void initialization(int centers = 6)
{
    // Previously a magic constant; now parameterized (backward compatible).
    numberOfCenters = centers;
    randPartition = new RandomPartition(numberOfCenters);
}
/// <summary>
/// Builds a Tee node over <paramref name="child"/>: same partition count as
/// the child's output, randomly partitioned, with no ordering or distinctness
/// guarantees on the output data set.
/// </summary>
/// <param name="outputType">Element type of this node's output.</param>
/// <param name="isForked">Whether this tee feeds a fork.</param>
/// <param name="queryExpr">The originating query expression.</param>
/// <param name="child">The single upstream node being tee'd.</param>
internal DLinqTeeNode(Type outputType, bool isForked, Expression queryExpr, DLinqQueryNode child)
    : base(QueryNodeType.Tee, child.QueryGen, queryExpr, child)
{
    this.m_opName = "Tee";
    this.m_outputType = outputType;
    this.IsForked = isForked;
    this.m_partitionCount = child.OutputPartition.Count;

    // Output mirrors the child's partition count but loses order/distinct info.
    PartitionInfo partitioning = new RandomPartition(child.OutputDataSetInfo.partitionInfo.Count);
    this.m_outputDataSetInfo = new DataSetInfo(partitioning, DataSetInfo.NoOrderBy, DataSetInfo.NoDistinct);

    this.m_dynamicManager = this.InferDynamicManager();
}
/// <summary>
/// Builds a Fork node over <paramref name="child"/>. A fork splits one input
/// stream into several outputs; one <see cref="DLinqTeeNode"/> child is
/// created per output branch.
/// </summary>
/// <param name="fork">The fork lambda describing the split.</param>
/// <param name="keysExpr">
/// Optional keys expression. When non-null, one output branch is created per
/// key; when null, the branch count and element types are derived from the
/// fork lambda's tuple type arguments.
/// </param>
/// <param name="queryExpr">The originating query expression.</param>
/// <param name="child">The single upstream node being forked.</param>
internal DLinqForkNode(LambdaExpression fork, Expression keysExpr, Expression queryExpr, DLinqQueryNode child)
    : base(QueryNodeType.Fork, child.QueryGen, queryExpr, child)
{
    this.m_forkLambda = fork;
    this.m_keysExpression = keysExpr;
    this.m_opName = "Fork";
    ExpressionSimplifier<object> evaluator = new ExpressionSimplifier<object>();
    this.m_keys = null;
    this.m_keysIdx = -1;
    if (keysExpr != null)
    {
        this.m_keys = evaluator.Eval(keysExpr);
        this.m_keysIdx = DryadLinqObjectStore.Put(this.m_keys);
    }
    this.m_partitionCount = child.OutputPartition.Count;
    PartitionInfo pinfo = new RandomPartition(child.OutputDataSetInfo.partitionInfo.Count);
    this.m_outputDataSetInfo = new DataSetInfo(pinfo, DataSetInfo.NoOrderBy, DataSetInfo.NoDistinct);
    this.m_dynamicManager = this.InferDynamicManager();

    // Finally, create all the children of this:
    if (keysExpr == null)
    {
        // No keys: output types come from the fork lambda's result tuple.
        Type forkTupleType = fork.Type.GetGenericArguments()[1];
        if (forkTupleType.GetGenericTypeDefinition() == typeof(IEnumerable<>))
        {
            forkTupleType = forkTupleType.GetGenericArguments()[0];
        }
        Type[] queryTypeArgs = forkTupleType.GetGenericArguments();
        this.m_outputTypes = new Type[queryTypeArgs.Length];
        for (int i = 0; i < queryTypeArgs.Length; i++)
        {
            this.m_outputTypes[i] = queryTypeArgs[i];
            // Constructed for its side effect: the DLinqTeeNode registers
            // itself against this node via its base constructor. The previous
            // code stored it in an unused local (`parentNode`), now removed.
            new DLinqTeeNode(queryTypeArgs[i], true, queryExpr, this);
        }
    }
    else
    {
        // Keyed fork: one branch per key, all with the same element type.
        int forkCnt = ((Array)this.m_keys).Length;
        Type forkType = fork.Type.GetGenericArguments()[0];
        this.m_outputTypes = new Type[forkCnt];
        for (int i = 0; i < forkCnt; i++)
        {
            this.m_outputTypes[i] = forkType;
            // Side-effect-only construction; see note above.
            new DLinqTeeNode(forkType, true, queryExpr, this);
        }
    }
}
/// <summary>
/// Describes this node's output data set: randomly partitioned across
/// <c>m_partitionCount</c> partitions, with no ordering or distinctness
/// guarantees.
/// </summary>
private DataSetInfo ComputeOutputDataSetInfo()
{
    return new DataSetInfo(
        new RandomPartition(this.m_partitionCount),
        DataSetInfo.NoOrderBy,
        DataSetInfo.NoDistinct);
}
// Used to support the Left-homomorphic binary ApplyPerPartition.
// With a TeeNode, it does a broadcast, creating n partitions each collating the complete data.
/// <summary>
/// Builds a Merge node with <paramref name="parCount"/> output partitions.
/// All key/comparer related state is cleared: this merge neither sorts nor
/// preserves port order.
/// </summary>
/// <param name="parCount">Number of output partitions.</param>
/// <param name="queryExpr">The originating query expression.</param>
/// <param name="child">The single upstream node being merged.</param>
internal DLinqMergeNode(Int32 parCount, Expression queryExpr, DLinqQueryNode child)
    : base(QueryNodeType.Merge, child.QueryGen, queryExpr, child)
{
    this.m_opName = "Merge";

    // No key selection / ordering for this merge flavour.
    this.m_keySelectExpr = null;
    this.m_comparerExpr = null;
    this.m_comparer = null;
    this.m_comparerIdx = -1;
    this.m_isDescending = false;
    this.m_keepPortOrder = false;

    this.m_dynamicManager = DynamicManager.None;
    this.m_partitionCount = parCount;

    // Output is randomly partitioned; order/distinct guarantees are dropped.
    PartitionInfo partitioning = new RandomPartition(parCount);
    this.m_outputDataSetInfo = new DataSetInfo(partitioning, DataSetInfo.NoOrderBy, DataSetInfo.NoDistinct);

    this.m_isSplitting = false;
}
/// <summary>
/// Derives the output data-set description for a group-by style node.
/// Three cases: (1) a partial (local) aggregation keeps a plain random
/// partitioning; (2) no result selector (or a seeded aggregate) re-keys the
/// child's partition/order info through the group's <c>Key</c> property;
/// (3) otherwise the child's info is re-keyed through an identity key
/// selector and rewritten through the result selector.
/// </summary>
/// <param name="isLocalReduce">True when this node is a partial aggregation.</param>
private DataSetInfo ComputeOutputDataSetInfo(bool isLocalReduce)
{
    // TBD: could do a bit better with DistinctInfo.
    DataSetInfo childInfo = this.Children[0].OutputDataSetInfo;

    if (isLocalReduce)
    {
        // Partial aggregation node. No need to do anything.
        return new DataSetInfo(
            new RandomPartition(this.m_partitionCount),
            DataSetInfo.NoOrderBy,
            DataSetInfo.NoDistinct);
    }

    if (this.m_resSelectExpr == null || this.m_seedExpr != null)
    {
        // Build the new key selection expression (based on group key): g => g.Key
        ParameterExpression groupParam = Expression.Parameter(this.OutputTypes[0], "g");
        PropertyInfo keyProp = groupParam.Type.GetProperty("Key");
        Expression keyAccess = Expression.Property(groupParam, keyProp);
        Type selectorType = typeof(Func<,>).MakeGenericType(groupParam.Type, keyAccess.Type);
        LambdaExpression groupKeySelector = Expression.Lambda(selectorType, keyAccess, groupParam);

        PartitionInfo partInfo = childInfo.partitionInfo.Create(groupKeySelector);
        OrderByInfo orderInfo = DataSetInfo.NoOrderBy;
        if (this.m_opName == "OrderedGroupBy")
        {
            orderInfo = childInfo.orderByInfo.Create(groupKeySelector);
        }
        return new DataSetInfo(partInfo, orderInfo, DataSetInfo.NoDistinct);
    }
    else
    {
        // Identity key selector k => k, then rewrite through the result selector.
        ParameterExpression keyParam = Expression.Parameter(this.m_keySelectExpr.Body.Type, "k");
        Type identityType = typeof(Func<,>).MakeGenericType(keyParam.Type, keyParam.Type);
        LambdaExpression identitySelector = Expression.Lambda(identityType, keyParam, keyParam);

        PartitionInfo partInfo = childInfo.partitionInfo.Create(identitySelector);
        partInfo = partInfo.Rewrite(this.m_resSelectExpr, this.m_resSelectExpr.Parameters[0]);

        OrderByInfo orderInfo = DataSetInfo.NoOrderBy;
        if (this.m_opName == "OrderedGroupBy")
        {
            orderInfo = childInfo.orderByInfo.Create(identitySelector);
            orderInfo = orderInfo.Rewrite(this.m_resSelectExpr, keyParam);
        }
        return new DataSetInfo(partInfo, orderInfo, DataSetInfo.NoDistinct);
    }
}