public void CreateItemClusters()
{
    // Feature space: one binary column per possible genre
    // (the MovieLens 1M dataset defines 18 genres).
    const int numFeatures = 18;

    var features = new double[Items.Count, numFeatures];
    var ids = new List<string>(Items.Count);

    int row = 0;
    foreach (var item in Items.Values)
    {
        ids.Add(item.Id);

        // "genres" is a pipe-separated list; flag each matching feature column.
        foreach (string genre in item.Properties["genres"].Split('|'))
        {
            features[row, _mapper.ToInternalID(genre)] = 1;
        }

        row++;
    }

    _itemsCluster = Clusterer.Cluster(ids, features, NumDomains);
}
/// <summary>
/// Interactive loop: reads a source path and a destination path from the
/// console, clusters the documents found at the source (k = 7), and writes
/// the formatted results to the destination.
/// </summary>
public static void Run()
{
    //const string path = @"F:\thoughts\";
    while (true)
    {
        Console.WriteLine("Write path to file(s):");
        var path = Console.ReadLine();
        Console.WriteLine("Where to save results?");
        var savePath = Console.ReadLine();

        // Fix: the original only checked for null, so an empty or
        // whitespace-only path (plain Enter) was passed straight into
        // FileReader / File.WriteAllText and crashed.
        if (!string.IsNullOrWhiteSpace(path))
        {
            var fileReader = new FileReader(path);
            var documents = fileReader.PerformRead();
            var clusterableDocuments = new MyDocumentToClusterableEntityMapper().MapAllData(documents);
            var kMeansDatas = new ClusterableEntityToKMeansEntityMapper().MapAllData(clusterableDocuments);

            var clusterer = new Clusterer(7);
            clusterer.Cluster(kMeansDatas, null);

            var results = new ClusteredDataResultWriter().WriteResults(kMeansDatas);
            if (!string.IsNullOrWhiteSpace(savePath))
            {
                File.WriteAllText(savePath, results);
            }
        }

        Console.WriteLine($"Amount of exceptions while reading: {exceptionsCount}");
        Console.Read();
    }
}
/// <summary>
/// Console demo driver: loads the raw data, shows it, runs k-means with
/// three centroids, and prints the clustering both in internal form and
/// grouped by cluster.
/// </summary>
public void Go()
{
    // Fix: the original allocated a List<myClusterable> and immediately
    // overwrote the reference with GetData() — the allocation was dead.
    List<myClusterable> slow_VI = GetData();

    Console.WriteLine("Raw unclustered data:\n");
    Console.WriteLine("----------------------");
    ShowData(slow_VI, 1, true, false);

    var c = new Clusterer();
    c.Data = slow_VI;

    Console.WriteLine("\nSetting numClusters to 3");
    Console.WriteLine("Starting clustering using k-means algorithm");
    int numClusters = c.numCentroids = 3;
    int[] results = c.Cluster();
    Console.WriteLine("Clustering complete\n");

    Console.WriteLine("Final clustering in internal form:\n");
    ShowVector(results, true);

    Console.WriteLine("Raw data by cluster:\n");
    ShowClustered(slow_VI, results, numClusters, 1);
    Console.WriteLine("OK, done");
    //Console.ReadLine();
}
public void TestNmfClusting()
{
    // Cluster the Amazon books ratings with NMF (5 factors) and persist the
    // resulting user clusters to the configured path.
    var ratingsReader = new AmazonReader(Paths.AmazonBooksRatings);
    var ratings = new Dataset<ItemRating>(ratingsReader);

    new Clusterer(ratings).ClusterNmf(5, Paths.AmazonBooksUsersCluster);
}
/// <summary>
/// Serializes the layer's clusterer. Only <see cref="FlareClusterer"/> is
/// supported; anything else is rejected.
/// </summary>
/// <exception cref="NotSupportedException">Thrown for any clusterer type other than FlareClusterer.</exception>
protected void WriteLayerClusterer(Clusterer clusterer)
{
    // Pattern matching replaces the redundant "is" test followed by a second
    // "as" cast of the same value.
    if (clusterer is FlareClusterer flareClusterer)
    {
        WriteFlareClusterer(flareClusterer);
    }
    else
    {
        throw new NotSupportedException(Resources.Strings.ExceptionClusterer);
    }
}
// Hash function over cluster cells of an EditShape used by the clustering
// sweep. The "_enclosing" parameter naming and fully-qualified type names
// indicate machine-converted (Sharpen) Java code; formatting conventions of
// that converter are kept.
public ClusterHashFunction(Clusterer _enclosing, com.epl.geometry.EditShape shape, com.epl.geometry.Point2D origin, double sqr_tolerance, double inv_cell_size, int hash_values)
{
    this._enclosing = _enclosing;
    this.m_shape = shape;
    this.m_sqr_tolerance = sqr_tolerance;
    this.m_inv_cell_size = inv_cell_size;
    this.m_origin = origin;
    this.m_hash_values = hash_values;
    // Scratch points are seeded with NaN. NOTE(review): presumably so a
    // first comparison never matches a real coordinate — confirm against the
    // methods that read m_pt / m_pt_2.
    this.m_pt.SetNaN();
    this.m_pt_2.SetNaN();
}
public void CreateClustersEpinion()
{
    // Build user/item clusterings of the Epinions ratings for every even
    // cluster count from 2 through 14, one CSV pair per count.
    var dataset = new Dataset<ItemRating>(new EpinionReader(Paths.EpinionRatings));
    var clusterer = new Clusterer(dataset);

    for (int numClusters = 2; numClusters <= 14; numClusters += 2)
    {
        clusterer.WriteUsersCluster(Paths.EpinionUsersCluster + numClusters + ".csv", numClusters, 5);
        clusterer.WriteItemsCluster(Paths.EpinionItemsCluster + numClusters + ".csv", numClusters, 5);
    }
}
public void CreateClustersAmazon()
{
    // Write user/item clusterings for the Amazon music ratings.
    // NOTE(review): with the counter starting at 14 and the bound at 15 this
    // loop runs exactly once (numClusters == 14); presumably the smaller
    // counts were produced in an earlier run — confirm before widening.
    var dataset = new Dataset<ItemRating>(new AmazonReader(Paths.AmazonMusicRatings));
    var clusterer = new Clusterer(dataset);

    for (int numClusters = 14; numClusters < 15; numClusters += 2)
    {
        clusterer.WriteUsersCluster(Paths.AmazonMusicUsersCluster + numClusters + ".csv", numClusters, 5);
        clusterer.WriteItemsCluster(Paths.AmazonMusicItemsCluster + numClusters + ".csv", numClusters, 5);
    }
}
public void CreateClustersMovieLens()
{
    // Build user/item clusterings of the MovieLens 1M ratings for every even
    // cluster count from 2 through 14, one CSV pair per count.
    var dataset = new Dataset<ItemRating>(new MovieLensReader(Paths.MovieLens1M));
    var clusterer = new Clusterer(dataset);

    for (int numClusters = 2; numClusters <= 14; numClusters += 2)
    {
        clusterer.WriteUsersCluster(Paths.MovieLens1MUsersCluster + numClusters + ".csv", numClusters, 5);
        clusterer.WriteItemsCluster(Paths.MovieLens1MItemsCluster + numClusters + ".csv", numClusters, 5);
    }
}
// Wires up the controller: stores the injected EF context and identity user
// manager, eagerly builds and trains the clustering model, precomputes the
// two dashboard graphs, and configures the HTTP client headers.
public MeetingsController(fitnessdataContext context, UserManager<ApplicationUser> userManager)
{
    _context = context;
    _userManager = userManager;

    // Train the clustering model up front so it is ready for the first
    // request handled by this controller instance.
    _clusterer = new Clusterer(_context);
    _clusterer.CreateModel();

    // Precompute the chart data this controller exposes.
    TrainbyCityGraph();
    CountMeetingbyTypeGraph();

    // NOTE(review): "client" is not declared here — if it is a shared/static
    // HttpClient, its headers are reset on every controller construction;
    // confirm that repeated Add of the same header cannot accumulate/throw.
    client.DefaultRequestHeaders.Accept.Clear();
    client.DefaultRequestHeaders.Add("User-Agent", ".NET Foundation Repository Reporter");
}
public void CreateClustersEpinion()
{
    // Generate Epinions user and item cluster files for cluster counts
    // 2, 4, ..., 14 (step of two below the bound of 15).
    var reader = new EpinionReader(Paths.EpinionRatings);
    var clusterer = new Clusterer(new Dataset<ItemRating>(reader));

    for (int k = 2; k < 15; k += 2)
    {
        string suffix = k + ".csv";
        clusterer.WriteUsersCluster(Paths.EpinionUsersCluster + suffix, k, 5);
        clusterer.WriteItemsCluster(Paths.EpinionItemsCluster + suffix, k, 5);
    }
}
public void CreateClustersAmazon()
{
    // Generate Amazon-music user and item cluster files.
    // NOTE(review): the loop bounds (start 14, limit 15, step 2) yield a
    // single iteration with k == 14 — presumably deliberate; confirm.
    var reader = new AmazonReader(Paths.AmazonMusicRatings);
    var clusterer = new Clusterer(new Dataset<ItemRating>(reader));

    for (int k = 14; k < 15; k += 2)
    {
        string suffix = k + ".csv";
        clusterer.WriteUsersCluster(Paths.AmazonMusicUsersCluster + suffix, k, 5);
        clusterer.WriteItemsCluster(Paths.AmazonMusicItemsCluster + suffix, k, 5);
    }
}
public void CreateClustersMovieLens()
{
    // Generate MovieLens 1M user and item cluster files for cluster counts
    // 2, 4, ..., 14 (step of two below the bound of 15).
    var reader = new MovieLensReader(Paths.MovieLens1M);
    var clusterer = new Clusterer(new Dataset<ItemRating>(reader));

    for (int k = 2; k < 15; k += 2)
    {
        string suffix = k + ".csv";
        clusterer.WriteUsersCluster(Paths.MovieLens1MUsersCluster + suffix, k, 5);
        clusterer.WriteItemsCluster(Paths.MovieLens1MItemsCluster + suffix, k, 5);
    }
}
/// <summary>
/// Runs k-means clustering (k = 5) on a background thread when the button
/// is clicked, then reports completion in the informer label.
/// </summary>
private async void performKMeans_Click(object sender, EventArgs e)
{
    // async void is acceptable only because this is a top-level UI event
    // handler; the await keeps the UI thread responsive while the CPU-bound
    // clustering runs on the thread pool.
    informer.Text = " ";

    // Simplification: await Task.Run directly instead of storing the task in
    // a local first — the intermediate variable added nothing.
    await Task.Run(() =>
    {
        var clusterer = new Clusterer(5);
        clusterer.Cluster(data, DrawChangedData);
    });

    informer.Text = "Done!";
}
/// <summary>
/// Constructor for the IBM (immersed boundary) adaptive local time stepping
/// scheme. Builds the initial clustering, one ABevolve instance per cluster,
/// and the IBM Runge-Kutta start-up time stepper.
/// </summary>
/// <exception cref="ArgumentException">If <paramref name="ibmSpeciesMap"/> is not an <c>ImmersedSpeciesMap</c>.</exception>
public IBMAdamsBashforthLTS(SpatialOperator standardOperator, SpatialOperator boundaryOperator, CoordinateMapping fieldsMap, CoordinateMapping boundaryParameterMap, ISpeciesMap ibmSpeciesMap, IBMControl control, IList<TimeStepConstraint> timeStepConstraints, int reclusteringInterval, bool fluxCorrection)
    : base(standardOperator, fieldsMap, boundaryParameterMap, control.ExplicitOrder, control.NumberOfSubGrids, true, timeStepConstraints, reclusteringInterval: reclusteringInterval, fluxCorrection: fluxCorrection, subGrid: ibmSpeciesMap.SubGrid)
{
    this.speciesMap = ibmSpeciesMap as ImmersedSpeciesMap;
    if (this.speciesMap == null)
    {
        // Fix: the original passed the non-existent parameter name
        // "speciesMap"; nameof keeps the reported name tied to the actual
        // parameter, ibmSpeciesMap.
        throw new ArgumentException(
            "Only supported for species maps of type 'ImmersedSpeciesMap'",
            nameof(ibmSpeciesMap));
    }

    this.standardOperator = standardOperator;
    this.boundaryOperator = boundaryOperator;
    this.boundaryParameterMap = boundaryParameterMap;
    this.fieldsMap = fieldsMap;
    this.control = control;

    agglomerationPatternHasChanged = true;

    cutCells = speciesMap.Tracker.Regions.GetCutCellMask();
    cutAndTargetCells = cutCells.Union(speciesMap.Agglomerator.AggInfo.TargetCells);

    // Normal LTS constructor
    NumberOfLocalTimeSteps = new List<int>(control.NumberOfSubGrids);

    clusterer = new Clusterer(this.gridData, this.TimeStepConstraints);
    CurrentClustering = clusterer.CreateClustering(control.NumberOfSubGrids, speciesMap.SubGrid);
    CurrentClustering = CalculateNumberOfLocalTS(CurrentClustering); // Might remove sub-grids when time step sizes are too similar

    // One ABevolve instance per cluster; forward their change-rate events.
    ABevolver = new IBMABevolve[CurrentClustering.NumberOfClusters];
    for (int i = 0; i < ABevolver.Length; i++)
    {
        ABevolver[i] = new IBMABevolve(standardOperator, boundaryOperator, fieldsMap, boundaryParameterMap, speciesMap, control.ExplicitOrder, control.LevelSetQuadratureOrder, control.CutCellQuadratureType, sgrd: CurrentClustering.Clusters[i], adaptive: this.adaptive);
        ABevolver[i].OnBeforeComputeChangeRate += (t1, t2) => this.RaiseOnBeforeComputechangeRate(t1, t2);
    }

    GetBoundaryTopology();

#if DEBUG
    for (int i = 0; i < CurrentClustering.NumberOfClusters; i++)
    {
        Console.WriteLine("IBM AB LTS ctor: id=" + i + " -> sub-steps=" + NumberOfLocalTimeSteps[i] + " and elements=" + CurrentClustering.Clusters[i].GlobalNoOfCells);
    }
#endif

    // Start-up phase needs an IBM Runge-Kutta time stepper
    RungeKuttaScheme = new IBMSplitRungeKutta(standardOperator, boundaryOperator, fieldsMap, boundaryParameterMap, speciesMap, timeStepConstraints);
}
// Background worker body: builds a clusterer from the current (or default)
// clustering spec, runs it, and derives a color scheme from the results.
// Returns null when no clusterer could be created.
private Tuple<Clusterer, ClusteredReportResults, ReportColorScheme> GetClusteredResultsBackground(ILongWaitBroker longWaitBroker)
{
    var spec = BindingListSource.ClusteringSpec ?? ClusteringSpec.DEFAULT;
    var clusterer = Clusterer.CreateClusterer(longWaitBroker.CancellationToken, spec, BindingListSource.ReportResults);
    if (clusterer == null)
    {
        return null;
    }

    var clusteredResults = clusterer.GetClusteredResults();
    var scheme = ReportColorScheme.FromClusteredResults(longWaitBroker.CancellationToken, clusteredResults);
    return Tuple.Create(clusterer, clusteredResults, scheme);
}
// Executes the full query pipeline: pivot, transform stack, then optional
// clustering when the query parameters carry a clustering spec.
protected QueryResults RunAll(CancellationToken cancellationToken, QueryResults results)
{
    var pivotedRows = Pivot(cancellationToken, results);
    var dataSchema = results.Parameters.ViewInfo.DataSchema;
    var transformedRows = Transform(cancellationToken, dataSchema,
        new TransformResults(null, null, pivotedRows), results.Parameters.TransformStack);

    var clusteringSpec = results.Parameters.ClusteringSpec;
    if (clusteringSpec != null)
    {
        var clusteredRows = Clusterer.PerformClustering(cancellationToken, clusteringSpec, transformedRows.PivotedRows);
        // A null result means clustering was skipped/cancelled; keep the
        // un-clustered transform in that case.
        if (clusteredRows != null)
        {
            transformedRows = new TransformResults(transformedRows.Parent, transformedRows.RowTransform, clusteredRows);
        }
    }

    return results.ChangeTransformResults(transformedRows);
}
// Sets up the map-object pipeline: an object pool sized to the tile grid,
// tap-event forwarding, a filter, a clusterer, and a tile matrix centred on
// the current map view; finally subscribes to sampled map-view changes.
public MapObjects(MapControl map, UInt16 horizontalTiles, UInt16 verticalTiles)
{
    HorizontalTiles = horizontalTiles;
    VerticalTiles = verticalTiles;

    ObjectPool = new ObjectPool<TObject, TCluster>(HorizontalTiles, VerticalTiles, MaximumUnclusteredElements);
    // Forward pool tap events to this class's handlers.
    // NOTE(review): these lambda subscriptions are never removed; fine if
    // the pool and this object share a lifetime — confirm.
    ObjectPool.ObjectTapped += (sender, context) => ObjectTapped(sender, context);
    ObjectPool.ClusterTapped += (sender, context) => ClusterTapped(sender, context);

    Map = map;
    Filter = new MapObjectsFilter();
    Clusterer = new Clusterer();
    Matrix = new TileMatrix<Object>(HorizontalTiles, VerticalTiles, map.ZoomLevel, new GeoPoint(map.Center.Position.Latitude, map.Center.Position.Longitude));

    // EventSampler presumably rate-limits map events to EventFrequency
    // before OnMapViewChanged fires — confirm against MapControlEventSampler.
    EventSampler = new MapControlEventSampler(map, EventFrequency);
    EventSampler.MapViewChanged += OnMapViewChanged;
}
public void FloydWarshallClustererTest()
{
    // Two connected components are expected: the Rhine-Main triangle
    // (Frankfurt / Wiesbaden / Mainz) and the Rüdesheim-Geisenheim pair.
    Clusterer<string> algorithm = ClustererFactory.GetInstance<string>("FloydWarshallClusterer");

    var matches = algorithm.Matches;
    matches.Add(new MatchPair<string>("Frankfurt", "Wiesbaden", 40));
    matches.Add(new MatchPair<string>("Frankfurt", "Mainz", 30));
    matches.Add(new MatchPair<string>("Mainz", "Wiesbaden", 15));
    matches.Add(new MatchPair<string>("Rüdesheim", "Geisenheim", 4));

    algorithm.Execute();

    // First cluster keeps the insertion order of the Rhine-Main matches.
    Assert.AreEqual(new MatchPair<string>("Frankfurt", "Wiesbaden", 40), algorithm.Clusters[0][0]);
    Assert.AreEqual(new MatchPair<string>("Frankfurt", "Mainz", 30), algorithm.Clusters[0][1]);
    Assert.AreEqual(new MatchPair<string>("Mainz", "Wiesbaden", 15), algorithm.Clusters[0][2]);
    Assert.AreEqual(new MatchPair<string>("Rüdesheim", "Geisenheim", 4), algorithm.Clusters[1][0]);
}
//################# Hack for saving to database in every (A)LTS sub-step
/// <summary>
/// Standard constructor for the (adaptive) local time stepping algorithm
/// </summary>
/// <param name="spatialOp">Spatial operator</param>
/// <param name="Fieldsmap">Coordinate mapping for the variable fields</param>
/// <param name="Parameters">optional parameter fields, can be null if <paramref name="spatialOp"/> contains no parameters; must match the parameter field list of <paramref name="spatialOp"/>, see <see cref="BoSSS.Foundation.SpatialOperator.ParameterVar"/></param>
/// <param name="order">LTS/AB order</param>
/// <param name="numOfClusters">Amount of sub-grids/clusters to be used for LTS</param>
/// <param name="timeStepConstraints">Time step constraints for later usage as metric</param>
/// <param name="subGrid">Sub-grids, e.g., from previous time steps</param>
/// <param name="fluxCorrection">Bool for triggering the flux correction</param>
/// <param name="reclusteringInterval">Interval for potential reclustering</param>
/// <param name="saveToDBCallback">Hack for plotting all sub-steps</param>
/// <param name="initialTimestepNumber">Time step number to start counting from when adaptive reclustering is active</param>
/// <remarks>Uses the k-Mean clustering, see <see cref="BoSSS.Solution.Utils.Kmeans"/>, to generate the element groups</remarks>
public AdamsBashforthLTS(SpatialOperator spatialOp, CoordinateMapping Fieldsmap, CoordinateMapping Parameters, int order, int numOfClusters, IList<TimeStepConstraint> timeStepConstraints = null, SubGrid subGrid = null, bool fluxCorrection = true, int reclusteringInterval = 0, Action<TimestepNumber, double> saveToDBCallback = null, int initialTimestepNumber = 1)
    : base(spatialOp, Fieldsmap, Parameters, order, timeStepConstraints, subGrid)
{
    // A non-zero reclustering interval switches on adaptive LTS.
    if (reclusteringInterval != 0)
    {
        numberOfClustersInitial = numOfClusters;
        this.timestepNumber = initialTimestepNumber;
        this.adaptive = true;
    }

    // Add OnBeforeComputeChangeRate (AV) to start-up phase time stepper
    RungeKuttaScheme.OnBeforeComputeChangeRate += (t1, t2) => this.RaiseOnBeforeComputechangeRate(t1, t2);

    this.reclusteringInterval = reclusteringInterval;
    this.gridData = Fieldsmap.Fields.First().GridDat;
    this.fluxCorrection = fluxCorrection;

    NumberOfLocalTimeSteps = new List<int>(numOfClusters);

    clusterer = new Clusterer(this.gridData, this.TimeStepConstraints);
    CurrentClustering = clusterer.CreateClustering(numOfClusters, this.SubGrid); // Might remove clusters when their centres are too close
    CurrentClustering = CalculateNumberOfLocalTS(CurrentClustering); // Might remove clusters when time step sizes are too similar

    // One ABevolve instance per surviving cluster; forward their events.
    ABevolver = new ABevolve[CurrentClustering.NumberOfClusters];
    for (int i = 0; i < ABevolver.Length; i++)
    {
        ABevolver[i] = new ABevolve(spatialOp, Fieldsmap, Parameters, order, adaptive: this.adaptive, sgrd: CurrentClustering.Clusters[i]);
        ABevolver[i].OnBeforeComputeChangeRate += (t1, t2) => this.RaiseOnBeforeComputechangeRate(t1, t2);
    }

    GetBoundaryTopology();

#if DEBUG
    for (int i = 0; i < CurrentClustering.NumberOfClusters; i++)
    {
        Console.WriteLine("AB LTS Ctor: id=" + i + " -> sub-steps=" + NumberOfLocalTimeSteps[i] + " and elements=" + CurrentClustering.Clusters[i].GlobalNoOfCells);
    }
#endif

    // Saving time steps in subgrids
    //this.saveToDBCallback = saveToDBCallback;
}
// Builds one PointPairList per distinct column-header tuple for a PCA
// scatter plot over the columns of the given series group; points sharing a
// header tuple land in the same list (one plotted series per tuple).
private Dictionary<ImmutableList<HeaderLevel>, PointPairList> GetColumnPointPairLists(PivotedProperties.SeriesGroup seriesGroup, int xAxisIndex, int yAxisIndex)
{
    var pointLists = new Dictionary<ImmutableList<HeaderLevel>, PointPairList>();
    // Request just enough principal components to cover both plotted axes.
    var results = Clusterer.PerformPcaOnColumnGroup(seriesGroup, Math.Max(xAxisIndex, yAxisIndex) + 1);
    var headerLevels = Clusterer.Properties.GetColumnHeaders(seriesGroup).ToList();
    for (int iColumn = 0; iColumn < results.ItemComponents.Count; iColumn++)
    {
        // Header tuple for this column: one HeaderLevel per header series,
        // using the first non-null value found among the row items.
        var headers = new List<HeaderLevel>();
        foreach (var series in headerLevels)
        {
            var pd = series.PropertyDescriptors[iColumn];
            var objectValue = Clusterer.RowItems.Select(pd.GetValue).FirstOrDefault(value => null != value);
            headers.Add(new HeaderLevel(series.SeriesCaption, objectValue, ColorScheme.GetColor(series, objectValue) ?? MISSING_COLOR));
        }
        var key = ImmutableList.ValueOf(headers);
        PointPairList pointPairList;
        if (!pointLists.TryGetValue(key, out pointPairList))
        {
            pointPairList = new PointPairList();
            pointLists.Add(key, pointPairList);
        }
        var pointInfo = new PointInfo(key);
        var cellLocator = CellLocator.ForColumn(headerLevels.Select(series => series.PropertyDescriptors[iColumn]).ToList(), ImmutableList.Empty<DataPropertyDescriptor>());
        // NOTE(review): only the first row item is consulted when attaching
        // the document node / replicate to the point — confirm intended.
        var rowItem = Clusterer.RowItems[0];
        pointInfo = pointInfo.ChangeIdentityPath(cellLocator.GetSkylineDocNode(rowItem)?.IdentityPath)
            .ChangeReplicateName(cellLocator.GetReplicate(rowItem)?.Name);
        var pointPair = new PointPair(results.ItemComponents[iColumn][xAxisIndex], results.ItemComponents[iColumn][yAxisIndex])
        {
            Tag = pointInfo
        };
        pointPairList.Add(pointPair);
    }
    return (pointLists);
}
////################# Hack for saving to database in every (A)LTS sub-step
//private Action<TimestepNumber, double> saveToDBCallback;
////################# Hack for saving to database in every (A)LTS sub-step
/// <summary>
/// Standard constructor for the (adaptive) local time stepping algorithm
/// </summary>
/// <param name="spatialOp">Spatial operator</param>
/// <param name="Fieldsmap">Coordinate mapping for the variable fields</param>
/// <param name="Parameters">optional parameter fields, can be null if <paramref name="spatialOp"/> contains no parameters; must match the parameter field list of <paramref name="spatialOp"/>, see <see cref="BoSSS.Foundation.SpatialOperator.ParameterVar"/></param>
/// <param name="order">LTS/AB order</param>
/// <param name="numOfClusters">Amount of sub-grids/clusters to be used for LTS</param>
/// <param name="timeStepConstraints">Time step constraints for later usage as metric</param>
/// <param name="subGrid">Sub-grids, e.g., from previous time steps</param>
/// <param name="fluxCorrection">Bool for triggering the flux correction</param>
/// <param name="reclusteringInterval">Interval for potential reclustering</param>
/// <param name="saveToDBCallback">Hack for plotting all sub-steps</param>
/// <param name="maxNumOfSubSteps">Passed through to the Clusterer — presumably an upper bound on sub-steps per cluster (0 = unrestricted); confirm</param>
/// <param name="forceReclustering">Forces reclustering regardless of the interval</param>
/// <param name="logging">Enables logging</param>
/// <param name="consoleOutput">Enables console diagnostics</param>
/// <remarks>Uses the k-Mean clustering, see <see cref="BoSSS.Solution.Utils.Kmeans"/>, to generate the element groups</remarks>
public AdamsBashforthLTS(SpatialOperator spatialOp, CoordinateMapping Fieldsmap, CoordinateMapping Parameters, int order, int numOfClusters, IList<TimeStepConstraint> timeStepConstraints = null, SubGrid subGrid = null, bool fluxCorrection = true, int reclusteringInterval = 0, Action<TimestepNumber, double> saveToDBCallback = null, int maxNumOfSubSteps = 0, bool forceReclustering = false, bool logging = false, bool consoleOutput = false)
    : base(spatialOp, Fieldsmap, Parameters, order, timeStepConstraints, subGrid)
{
    this.forceReclustering = forceReclustering;
    this.Logging = logging;
    this.ConsoleOutput = consoleOutput;

    // A non-zero reclustering interval switches on adaptive LTS.
    if (reclusteringInterval != 0)
    {
        NumberOfClustersInitial = numOfClusters;
        this.adaptive = true;
    }

    // Add OnBeforeComputeChangeRate (AV) to start-up phase time stepper
    RungeKuttaScheme.OnBeforeComputeChangeRate += (t1, t2) => this.RaiseOnBeforeComputechangeRate(t1, t2);

    this.reclusteringInterval = reclusteringInterval;
    this.gridData = Fieldsmap.Fields.First().GridDat;
    this.fluxCorrection = fluxCorrection;

    if (ConsoleOutput)
    {
        Console.WriteLine("### This is ABLTS ctor ###");
    }

    clusterer = new Clusterer(this.gridData, maxNumOfSubSteps);
    CurrentClustering = clusterer.CreateClustering(numOfClusters, this.TimeStepConstraints, this.SubGrid); // Might remove clusters when their centres are too close
    CurrentClustering = clusterer.TuneClustering(CurrentClustering, Time, this.TimeStepConstraints); // Might remove clusters when their time step sizes are too similar

    // One ABevolve instance per surviving cluster; forward their events.
    ABevolver = new ABevolve[CurrentClustering.NumberOfClusters];
    for (int i = 0; i < ABevolver.Length; i++)
    {
        ABevolver[i] = new ABevolve(spatialOp, Fieldsmap, Parameters, order, adaptive: this.adaptive, sgrd: CurrentClustering.Clusters[i]);
        ABevolver[i].OnBeforeComputeChangeRate += (t1, t2) => this.RaiseOnBeforeComputechangeRate(t1, t2);
    }

    GetBoundaryTopology();

    // Saving time steps in subgrids
    //this.saveToDBCallback = saveToDBCallback;
}
/// <summary>
/// Console demo: clusters ten hard-coded height/weight pairs with k-means
/// (k = 3) and prints the raw data, the internal clustering vector, and the
/// data grouped by cluster.
/// </summary>
static void K_Means_Main(string[] args)
{
    // Fix: the original used $@"\n...\n" — a verbatim string, so the literal
    // characters '\' 'n' were printed instead of newlines. The $@ prefix was
    // also pointless (no interpolation holes).
    Console.WriteLine("\nBegin k-means clustering demo\n");

    #region setup
    // Hard-coded height (in.) / weight (kg.) sample data.
    double[][] rawData = new double[10][];
    rawData[0] = new double[] { 73, 72.6 };
    rawData[1] = new double[] { 61, 54.4 };
    rawData[2] = new double[] { 67, 99.9 };
    rawData[3] = new double[] { 68, 97.3 };
    rawData[4] = new double[] { 62, 59.0 };
    rawData[5] = new double[] { 75, 81.6 };
    rawData[6] = new double[] { 74, 77.1 };
    rawData[7] = new double[] { 66, 97.3 };
    rawData[8] = new double[] { 68, 93.3 };
    rawData[9] = new double[] { 61, 59.0 };
    //double[][] rawData = LoadData("..\\..\\HeightWeight.txt", 10, 2, ',');
    #endregion

    Console.WriteLine("Raw unclustered height (in.) weight (kg.) data:\n");
    Console.WriteLine(" ID Height Weight");
    Console.WriteLine("----------------------");
    ShowData(rawData, 1, true, false);

    int numClusters = 3;
    Console.WriteLine("\nSetting numClusters to " + numClusters);
    Console.WriteLine("Starting clustering using k-means algorithm");

    Clusterer c = new Clusterer(numClusters);
    int[] clustering = c.Cluster(rawData);
    Console.WriteLine("Clustering complete\n");

    Console.WriteLine("Final clustering in internal form:\n");
    ShowVector(clustering, true);

    Console.WriteLine("Raw data by cluster:\n");
    ShowClustered(rawData, clustering, numClusters, 1);

    Console.WriteLine("\nEnd k-means clustering demo\n");
    Console.ReadLine();
}
// Builds one PointPairList per distinct row-header tuple for a PCA scatter
// plot over the rows of the clustered report; rows sharing a header tuple
// land in the same list (one plotted series per tuple).
private Dictionary<ImmutableList<HeaderLevel>, PointPairList> GetRowPointPairLists(int xAxisIndex, int yAxisIndex)
{
    var pointLists = new Dictionary<ImmutableList<HeaderLevel>, PointPairList>();
    // Request just enough principal components to cover both plotted axes.
    var results = Clusterer.PerformPcaOnRows(Math.Max(xAxisIndex, yAxisIndex) + 1);
    var cellLocator = CellLocator.ForRow(Clusterer.Properties.RowHeaders);
    for (int iRow = 0; iRow < results.ItemLabels.Count; iRow++)
    {
        var rowItem = results.ItemLabels[iRow];
        // Header tuple for this row: one HeaderLevel per row-header property.
        var headers = new List<HeaderLevel>();
        foreach (var pdHeader in Clusterer.Properties.RowHeaders)
        {
            var objectValue = pdHeader.GetValue(rowItem);
            headers.Add(new HeaderLevel(pdHeader.ColumnCaption, objectValue, ColorScheme.GetColor(pdHeader, rowItem) ?? MISSING_COLOR));
        }
        var key = ImmutableList.ValueOf(headers);
        PointPairList pointPairList;
        if (!pointLists.TryGetValue(key, out pointPairList))
        {
            pointPairList = new PointPairList();
            pointLists.Add(key, pointPairList);
        }
        // Attach the originating document node / replicate (when resolvable)
        // so the UI can navigate from a plotted point back to its row.
        var pointInfo = new PointInfo(key);
        pointInfo = pointInfo.ChangeIdentityPath(cellLocator.GetSkylineDocNode(rowItem)?.IdentityPath)
            .ChangeReplicateName(cellLocator.GetReplicate(rowItem)?.Name);
        var point = new PointPair(results.ItemComponents[iRow][xAxisIndex], results.ItemComponents[iRow][yAxisIndex])
        {
            Tag = pointInfo
        };
        pointPairList.Add(point);
    }
    return (pointLists);
}
// Replaces the current clustering state and color scheme, then refreshes
// the controls to reflect the new data.
public void SetData(Clusterer clusterer, ReportColorScheme colorScheme)
{
    ColorScheme = colorScheme;
    Clusterer = clusterer;

    UpdateControls();
}
/// <summary>
/// Serializes the layer's clusterer. Only <see cref="FlareClusterer"/> is
/// supported; anything else is rejected.
/// </summary>
/// <exception cref="NotSupportedException">Thrown for any clusterer type other than FlareClusterer.</exception>
protected void WriteLayerClusterer(Clusterer clusterer)
{
    // Pattern matching replaces the "is" test followed by an "as" cast of
    // the same value; the else branch gains braces to match the
    // brace-always convention used elsewhere in the file.
    if (clusterer is FlareClusterer flareClusterer)
    {
        WriteFlareClusterer(flareClusterer);
    }
    else
    {
        throw new NotSupportedException(Resources.Strings.ExceptionClusterer);
    }
}
public void TestNmfClusting()
{
    // NMF clustering (5 factors) of the Amazon books ratings; user clusters
    // are written to the configured output path.
    // (The "Clusting" typo in the method name is kept — it is public API.)
    var dataset = new Dataset<ItemRating>(new AmazonReader(Paths.AmazonBooksRatings));
    var clusterer = new Clusterer(dataset);
    clusterer.ClusterNmf(5, Paths.AmazonBooksUsersCluster);
}