internal void MoveTo(ClusterList s, int toID)
{
    // Move this element from its current cluster to the cluster with id toID
    Cluster from = s.clusters[ClusterID];
    Cluster to = s.clusters[toID];

    // If it is the first element of the source cluster
    if (Before == null)
    {
        from.First = After;
        if (After != null)
        {
            After.Before = null;
        }
    }
    else
    {
        Before.After = After;
        if (After != null)
        {
            After.Before = Before;
        }
    }

    from.Num--;
    from.NumberLinks = from.NumberLinks - from.Num;
    Before = null;
    After = null;

    // Remove, then insert into the target cluster
    to.Add(this);
}
public ClusterList ConsolidateLCS(int minScoreAllowed, out int lcsBest)
{
    // Find the best pair of clusters to combine, scored by longest common subsequence (LCS)
    int iBest = -1, jBest = -1;
    lcsBest = 0;
    for (int i = 0; i < clusters.Count; i++)
    {
        for (int j = i + 1; j < clusters.Count; j++)
        {
            int lcs = clusters[i].ScoreLCS(clusters[j]);
            if (lcs >= lcsBest)
            {
                lcsBest = lcs;
                iBest = i;
                jBest = j;
            }
        }
    }

    // Return this list unchanged if no pair reaches the minimum score
    if (lcsBest < minScoreAllowed)
    {
        return this;
    }
    else
    {
        // Get the best candidate pair
        Cluster iCluster = clusters[iBest];
        Cluster jCluster = clusters[jBest];

        // Build a new list with the pair combined
        ClusterList newClusters = new ClusterList();
        for (int i = 0; i < clusters.Count; i++)
        {
            // Create a new cluster copying the old one
            Cluster cluster = new Cluster();
            clusters[i].CopyTo(cluster);

            if (i == iBest)
            {
                // Combine cluster j into cluster i and add the combined cluster
                cluster.Combine(jCluster);
                newClusters.clusters.Add(cluster);
            }
            else if (i == jBest)
            {
                // Do nothing: already combined into cluster i above
            }
            else
            {
                // Keep this cluster in the list unchanged
                newClusters.clusters.Add(cluster);
            }
        }

        // Return the new cluster list
        return newClusters;
    }
}
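A minimal driving-loop sketch for ConsolidateLCS, condensed from the Main example at the end of this listing; the fullSet variable and the minScoreAllowed value of 5 are taken from that example, not from this method itself. Because the method returns the same instance when no pair reaches the threshold, reference equality works as the stop condition.

// Sketch: merge the best-scoring cluster pair until no pair reaches minScoreAllowed.
ClusterList current = fullSet;   // fullSet: one cluster per sequence, built beforehand
int minScoreAllowed = 5;         // minimum acceptable LCS score (value from the Main example)
while (true)
{
    int lcsScore;
    ClusterList next = current.ConsolidateLCS(minScoreAllowed, out lcsScore);
    if (ReferenceEquals(next, current))
    {
        break;                   // no remaining pair scores >= minScoreAllowed
    }
    current = next;              // accept the consolidated list and keep going
}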
public void AddClusterToHyperCluster(Cluster cluster)
{
    ClusterList.Add(cluster);
    AdaptiveIntersect.UpdateClusterIntersectionByLast(ClusterList, HyperClusterVector);
    AdaptiveIntersect.UpdateClusterSummaryByLast(ClusterList, HyperClusterVectorSummary);
    ValidHyperClusterItemList = false;

    // TODO: redundant analysis, remove it
    //for (int i = 0; i < cluster.ClusterItemList.Count; i++)
    //{
    //    HyperClusterItemList.Add(cluster.ClusterItemList[i]);
    //}
}
private void initClustListData()
{
    ListViewItem lvi;

    // First add entry 0 (friend settings) and entry 10000 (default group settings)
    string friendSwitch = SQLiteHandler.getClusterIsEnabled("0");
    string defaultSwitch = SQLiteHandler.getClusterIsEnabled("10000");

    lvi = new ListViewItem("0");
    lvi.SubItems.Add("好友设置"); // "Friend settings"
    if (friendSwitch == "0")
    {
        lvi.SubItems.Add("关"); // "Off"
    }
    else
    {
        lvi.SubItems.Add("开"); // "On"
    }
    lvi = listView_clusterList.Items.Add(lvi);

    lvi = new ListViewItem("10000");
    lvi.SubItems.Add("默认群设置"); // "Default group settings"
    if (defaultSwitch == "0")
    {
        lvi.SubItems.Add("关");
    }
    else
    {
        lvi.SubItems.Add("开");
    }
    lvi = listView_clusterList.Items.Add(lvi);

    // Get the group (cluster) list
    ClusterList clusterlist = Plugin.Client.ClusterList;
    foreach (KeyValuePair<uint, ClusterInfo> kv in clusterlist)
    {
        ClusterInfo clusterInfo = kv.Value;
        string clusterId = kv.Key.ToString();
        string clusterSwitch = SQLiteHandler.getClusterIsEnabled(clusterId);

        lvi = new ListViewItem(clusterId);
        lvi.SubItems.Add(clusterInfo.Name);
        if (clusterSwitch == "1")
        {
            lvi.SubItems.Add("开");
        }
        else
        {
            lvi.SubItems.Add("关");
        }
        lvi = listView_clusterList.Items.Add(lvi);
    }
}
public void SetInitialClusterCenters()
{
    // Random initialization (disabled): pick NumberOfClusters random elements as centers
    //for (int i = 0; i < NumberOfClusters; i++)
    //{
    //    Random rnd = new Random();
    //    int num = rnd.Next(0, ElementList.Count - 1);
    //    ClusterList.Add(new Cluster(ElementList[num], i + 1));
    //}

    // Fixed initial cluster centers
    ClusterList.Add(new Cluster(ElementList[0], 1));
    ClusterList.Add(new Cluster(ElementList[8], 2));
    ClusterList.Add(new Cluster(ElementList[15], 3));
    ClusterList.Add(new Cluster(ElementList[17], 4));
    ClusterList.Add(new Cluster(ElementList[23], 5));
}
public ClusterGenerator(IGpsValueConverter converter, int NumZoomLevels)
{
    this.cachedConvertedObjects = new Dictionary<Object, BasicGeoposition>();
    this.keyToItems = new Dictionary<string, IList<Object>>();
    this.zoomLevelToClusters = new List<ClusterList>();
    this.validItemsToCluster = new ClusterList();
    this.gpsConverter = converter;
    this.numZoomLevels = NumZoomLevels;

    // Use 100 pixels as the minimum hit box length
    this.MinHitBoxSizeInMiles = GeospatialHelperStatic.ConvertPixelsToMiles(100, NumZoomLevels);

    this.LeastClustersZoomLevel = 1;
    this.MostClustersZoomLevel = 1;
}
private void Initialize()
{
    IP = new byte[4];
    ServerIp = new byte[4];
    LastLoginIp = new byte[4];
    IsLoggedIn = false;
    LoginMode = QQStatus.ONLINE;
    IsUdp = true;
    ContactInfo = new ContactInfo();
    IsShowFakeCam = false;
    Friends = new FriendList(this);
    QQList = new QQList();
    ClusterList = new ClusterList();
    this.QQKey = new QQKey(this);
}
public void AddItemToClusters(Object item)
{
    if (!GeospatialHelperStatic.IsValidGPS(ConvertObjToGPS(item)))
    {
        return;
    }

    for (int i = 1; i <= this.numZoomLevels; i++)
    {
        // List of clusters for this zoom level
        ClusterList clusters = this.zoomLevelToClusters[i - 1];
        bool addedToCluster = false;
        BasicGeoposition itemCoord = ConvertObjToGPS(item);

        for (int j = 0; j < clusters.Count; j++)
        {
            if (IsWithinBoundary(itemCoord, clusters[j].Location, i))
            {
                clusters[j].Count += 1;
                clusters[j].Objects.Add(item);
                string id = String.Format("ZL{0}_C{1}", i, j);
                this.keyToItems[id].Add(item);
                addedToCluster = true;
                break;
            }
        }

        if (addedToCluster == false)
        {
            string id = String.Format("ZL{0}_C{1}", i, clusters.Count);
            List<Object> singleItem = new List<Object>() { item };
            clusters.Add(new Cluster() { ClusterId = id, Objects = singleItem, Count = 1, Location = itemCoord });
            this.keyToItems.Add(id, singleItem);
        }

        this.zoomLevelToClusters[i - 1] = clusters;
    }
}
public async Task<Response<ClusterList>> ListBySubscriptionAsync(string subscriptionId, CancellationToken cancellationToken = default)
{
    Argument.AssertNotNullOrEmpty(subscriptionId, nameof(subscriptionId));

    using var message = CreateListBySubscriptionRequest(subscriptionId);
    await _pipeline.SendAsync(message, cancellationToken).ConfigureAwait(false);
    switch (message.Response.Status)
    {
        case 200:
            {
                ClusterList value = default;
                using var document = await JsonDocument.ParseAsync(message.Response.ContentStream, default, cancellationToken).ConfigureAwait(false);
                value = ClusterList.DeserializeClusterList(document.RootElement);
                return Response.FromValue(value, message.Response);
            }
        default:
            // Assumed completion of the truncated snippet: non-200 responses surface as a
            // RequestFailedException, following the usual generated Azure SDK pattern.
            throw new RequestFailedException(message.Response);
    }
}
public void Create()
{
    ClusterList.Clear();
    HyperClusterList.Clear();
    ItemToClusterMap.Clear();
    ClusterToHyperClusterMap.Clear();

    for (int i = 0; i < FeatureItemList.Count; i++)
    {
        AssignCluster(FeatureItemList[i]);
    }

    // Get the items assigned to each hyper cluster
    for (int i = 0; i < HyperClusterList.Count; i++)
    {
        HyperClusterList[i].GetHyperClusterItemList();
    }
}
public void Create(ICollection<FeatureItem> itemCollection)
{
    FeatureItemList.AddRange(itemCollection);
    ClusterList.Clear();
    HyperClusterList.Clear();
    ItemToClusterMap.Clear();
    ClusterToHyperClusterMap.Clear();

    for (int i = 0; i < FeatureItemList.Count; i++)
    {
        FeatureItemList[i].Id = i;
        AssignCluster(FeatureItemList[i]);
    }

    // Get the items assigned to each hyper cluster
    for (int i = 0; i < HyperClusterList.Count; i++)
    {
        HyperClusterList[i].GetHyperClusterItemList();
    }
}
public ClusterGenerator(IGpsValueConverter converter, int NumZoomLevels, double lengthValue, BoxLengthType hitBoxType)
{
    this.cachedConvertedObjects = new Dictionary<Object, BasicGeoposition>();
    this.keyToItems = new Dictionary<string, IList<Object>>();
    this.zoomLevelToClusters = new List<ClusterList>();
    this.validItemsToCluster = new ClusterList();
    this.gpsConverter = converter;
    this.numZoomLevels = NumZoomLevels;

    if (hitBoxType == BoxLengthType.Distance)
    {
        this.MinHitBoxSizeInMiles = lengthValue;
    }
    else if (hitBoxType == BoxLengthType.Pixels)
    {
        this.MinHitBoxSizeInMiles = GeospatialHelperStatic.ConvertPixelsToMiles(lengthValue, NumZoomLevels);
    }

    this.LeastClustersZoomLevel = 1;
    this.MostClustersZoomLevel = 1;
}
// Removes an item from the clusters; also deletes a cluster if it becomes empty
public void RemoveItemFromClusters(Object item)
{
    if (!GeospatialHelperStatic.IsValidGPS(ConvertObjToGPS(item)))
    {
        return;
    }

    for (int i = 1; i <= this.numZoomLevels; i++)
    {
        // List of clusters for this zoom level
        ClusterList clusters = this.zoomLevelToClusters[i - 1];
        for (int j = 0; j < clusters.Count; j++)
        {
            for (int k = 0; k < clusters[j].Objects.Count; k++)
            {
                if (item == clusters[j].Objects[k])
                {
                    if (clusters[j].Objects.Count == 1)
                    {
                        // Remove the entire cluster when it holds only this one element
                        clusters.RemoveAt(j);
                    }
                    else
                    {
                        // Otherwise remove just this object from the cluster
                        clusters[j].Objects.RemoveAt(k);
                    }

                    // Only one object is removed per zoom level; since C# has no multi-level break,
                    // this assignment forces the outer loop over clusters to terminate as well.
                    j = clusters.Count;
                    break;
                }
            }
        }
    }
}
public bool RemoveClusterFromHyperCluster(Cluster cluster)
{
    if (ClusterList.Remove(cluster) == true)
    {
        if (ClusterList.Count > 0)
        {
            AdaptiveIntersect.CalculateClusterIntersection(ClusterList, HyperClusterVector);
            AdaptiveIntersect.CalculateClusterSummary(ClusterList, HyperClusterVectorSummary);

            // TODO: redundant analysis, remove it
            // There is no point rebuilding the list every time, only when it actually needs to be accessed
            //HyperClusterItemList.Clear();
            //for (int i = 0; i < ClusterList.Count; i++)
            //{
            //    for (int j = 0; j < ClusterList[i].ClusterItemList.Count; j++)
            //    {
            //        HyperClusterItemList.Add(ClusterList[i].ClusterItemList[j]);
            //    }
            //}
        }
        ValidHyperClusterItemList = false;
    }
    return ClusterList.Count > 0;
}
private void Initialize()
{
    IP = new byte[4];
    ServerIp = new byte[4];
    LastLoginIp = new byte[4];
    IsLoggedIn = false;
    LoginMode = QQStatus.我在线上; // enum member name means "I am online"
    IsUdp = true;
    ContactInfo = new ContactInfo();
    IsShowFakeCam = false;
    Friends = new FriendList(this);
    QQList = new QQList();
    ClusterList = new ClusterList();
    this.QQKey = new QQKey(this);
}
private void bgw_DoWork(object sender, DoWorkEventArgs e)
{
    bgw.ReportProgress(0, "Reading data file...");
    // Read the input data file
    readDataFile();

    bgw.ReportProgress(10, "Initializing ClusterList...");
    // Initialize the clustering object
    clusterList = new ClusterList();

    bgw.ReportProgress(15, "Loading data...");
    // Import the data
    clusterList.importData(dataFileParser.inputScore, dataFileParser.normlizedScore);

    // Record the algorithm start time
    startTime = DateTime.Now;

    // Construct the initial solution
    bgw.ReportProgress(20, "Constructing initial solution...");
    clusterList.doInitial(initMethod);

    bgw.ReportProgress(25, "Initializing Tabu Search...");
    clusterList.initTSPhase();

    bgw.ReportProgress(30, "Executing Tabu Search...");
    for (int i = 0; i < Globals.EleNum; i++)
    {
        Util.WriteLine("Element(" + i + "):" + dataFileParser.nameFormater.getName(i));
    }
    Util.WriteLine("********************************\n");
    Util.WriteLine("Movelog.");
    clusterList.execTSPhase(bgw, diverMethod);
    endTime = DateTime.Now;

    FileStream fileCluster = new FileStream(outputPath + "\\cluster_" + Globals.FileName + "_" + startTime.ToString("yyyyMMddHHmmss") + ".txt", FileMode.OpenOrCreate);
    using (StreamWriter sw = new StreamWriter(fileCluster))
    {
        bgw.ReportProgress(85, "Writing Cluster Result..");
        clusterList.exportResult(sw, dataFileParser.normlizedScore);
        if (normMethod != Normalization.NONE)
        {
            sw.WriteLine("\n\r\n\r");
            sw.WriteLine("\nClustering Result mapping to the original data:");
            clusterList.exportResult(sw, dataFileParser.inputScore);
        }
        sw.WriteLine("\n\r\n\r");

        // Write out the parameters set in the UI
        sw.WriteLine("LBound = " + m_LB + "\tUbound = " + m_UB);
        sw.WriteLine("A1 = " + Globals.A1 + "\tA2 = " + Globals.A2 + "\tA3 = 0");
        sw.WriteLine("Time Cost = " + (endTime - startTime).ToString("g"));
    }

    FileStream fileElement = new FileStream(outputPath + "\\element_" + Globals.FileName + "_" + startTime.ToString("yyyyMMddHHmmss") + ".txt", FileMode.OpenOrCreate);
    using (StreamWriter sw = new StreamWriter(fileElement))
    {
        bgw.ReportProgress(90, "Writing Elements Distribution Result..");
        if (dataType != DataType.DISTANCE)
        {
            sw.WriteLine("CID\t EID\t EName\t \tX \tY \tZ");
            for (int i = 1; i <= Globals.CluNum; i++)
            {
                for (int j = 0; j < Globals.EleNum; j++)
                {
                    if (clusterList.elements[j].ClusterID == i)
                    {
                        MyPoint p = dataFileParser.points[j];
                        sw.WriteLine(i + "\t" + j + "\t" + dataFileParser.nameFormater.getName(j) + "\t" + p.X + "\t" + p.Y + "\t" + p.Z + '\n');
                    }
                }
            }
        }
        else // output for non-point data sets
        {
            sw.WriteLine("CID\t EID\t EName\t ");
            for (int i = 1; i <= Globals.CluNum; i++)
            {
                for (int j = 0; j < Globals.EleNum; j++)
                {
                    if (clusterList.elements[j].ClusterID == i)
                    {
                        sw.WriteLine(i + "\t" + j + "\t" + dataFileParser.nameFormater.getName(j) + '\n');
                    }
                }
            }
        }
    }

    bgw.ReportProgress(100, "Done! Time Cost =" + (endTime - startTime).ToString("g"));
}
public void AssignCluster(FeatureItem item)
{
    int iterationCounter = IterationLimit; // start from IterationLimit and count down
    bool isAssignementChanged = true;
    double itemVectorMagnitude = CalculateVectorMagnitude(item.FeatureVector);

    while (isAssignementChanged && iterationCounter > 0)
    {
        isAssignementChanged = false;
        List<KeyValuePair<Cluster, double>> clusterToProximityList = new List<KeyValuePair<Cluster, double>>();
        double proximityThreshold = itemVectorMagnitude / (bValue + rangeLimit * FeatureItemSize); // ||E_i|| / (b + rangeLimit * FeatureItemSize)

        // Calculate proximity values between the item and every cluster
        for (int i = 0; i < ClusterList.Count; i++)
        {
            double clusterVectorMagnitude = CalculateVectorMagnitude(ClusterList[i].ClusterVector);
            double proximity = CaulculateVectorIntersectionMagnitude(item.FeatureVector, ClusterList[i].ClusterVector) / (bValue + clusterVectorMagnitude); // ||C_j ∧ E_i|| / (b + ||C_j||)
            if (proximity > proximityThreshold)
            {
                clusterToProximityList.Add(new KeyValuePair<Cluster, double>(ClusterList[i], proximity));
            }
        }

        if (clusterToProximityList.Count > 0) // TODO: check whether adding (or skipping the add) needs extra validation here
        {
            // Sort in place in descending order of proximity
            clusterToProximityList.Sort((x, y) => -1 * x.Value.CompareTo(y.Value));

            // Search from the highest proximity to the lowest
            for (int i = 0; i < clusterToProximityList.Count; i++)
            {
                Cluster newCluster = clusterToProximityList[i].Key;
                double vigilance = CaulculateVectorIntersectionMagnitude(newCluster.ClusterVector, item.FeatureVector) / itemVectorMagnitude;
                if (vigilance >= pValue) // passed all tests and has the highest proximity
                {
                    if (ItemToClusterMap.ContainsKey(item.Id)) // find the cluster that currently holds this item
                    {
                        Cluster previousCluster = ItemToClusterMap[item.Id];
                        if (ReferenceEquals(newCluster, previousCluster))
                        {
                            break; // the best cluster is unchanged, so stop; no others are considered
                        }
                        if (previousCluster.RemoveItemFromCluster(item) == false) // the cluster became empty
                        {
                            ClusterList.Remove(previousCluster);
                        }
                    }

                    // Add the item to the chosen cluster
                    newCluster.AddItemToCluster(item);
                    ItemToClusterMap[item.Id] = newCluster;
                    isAssignementChanged = true;
                    break;
                }
            }
        }

        if (ItemToClusterMap.ContainsKey(item.Id) == false)
        {
            // No existing cluster passed the vigilance test: start a new cluster for this item
            Cluster newCluster = new Cluster(item);
            ClusterList.Add(newCluster);
            ItemToClusterMap.Add(item.Id, newCluster);
            isAssignementChanged = true;
        }

        iterationCounter--;
    }

    AssignHyperCluster();
}
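Restating the three quantities in AssignCluster directly from the code above (b is bValue, p is pValue, and the wedge denotes the vector intersection computed by CaulculateVectorIntersectionMagnitude): a cluster $C_j$ becomes a candidate when its proximity exceeds the threshold, and the highest-proximity candidate that also passes the vigilance test receives the item.

\[
\text{threshold} = \frac{\lVert E_i \rVert}{b + \text{rangeLimit}\cdot\text{FeatureItemSize}},
\qquad
\text{proximity}_j = \frac{\lVert C_j \wedge E_i \rVert}{b + \lVert C_j \rVert},
\qquad
\text{vigilance}_j = \frac{\lVert C_j \wedge E_i \rVert}{\lVert E_i \rVert} \ge p .
\]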
public WebModule() : base()
{
    Get["/"] = parameters =>
    {
        RestoreSession();
        ApplicationModel appModel = new ApplicationModel(Application.APPLICATION_NAME);
        return Render("index.pt", context: appModel, view: new ApplicationView(appModel, CurrentSession));
    };

    Get["/objs"] = parameters => // The site page listing apartments.
    {
        RestoreSession();
        ObjectList objList = new ObjectList(); // Needs debugging in MonoDevelop...
        ObjectListView objView = new ObjectListView(objList);
        return Render("objlist.pt", context: objList, view: objView);
    };

    Get["/offers"] = parameters =>
    {
        RestoreSession();
        OfferList model = new OfferList(null);
        OfferListView view = new OfferListView(model);
        return Render("offerlist.pt", context: model, view: view);
    };

    Get["/offers/{clid}"] = parameters =>
    {
        int clid = int.Parse(parameters.clid);
        RestoreSession();
        OfferList model = new OfferList(clid: clid);
        OfferListView view = new OfferListView(model);
        return Render("offerlist.pt", context: model, view: view);
    };

    Get["/offer/{GUID}"] = parameters => // Page for a single apartment offer
    {
        RestoreSession();
        string GUID = parameters.GUID;
        IOffer model = Application.Context.Offers.Where(x => x.GUID == GUID).FirstOrDefault();
        // In principle every object in BrightStarDB has its own ID, and our GUID could be bound to it. FIXME: bind it!
        string msg = "Объект (Offer) не найден!: " + GUID;
        if (model == null)
        {
            Console.WriteLine(msg); // and I did NOT understand why...
            return msg;
        }
        else
        {
            Console.WriteLine(model);
        }
        // Still need to hunt down the bug in the main template... Tomorrow. Exhausting...
        OfferView view = new OfferView(model);
        return Render("offer.pt", context: model, view: view);
    };

    Get["/agents"] = parameters =>
    {
        RestoreSession();
        AgentList model = new AgentList();
        AgentListView view = new AgentListView(model);
        return Render("agentlist.pt", context: model, view: view);
    };

    Get["/login"] = parameters => // This page has not been needed for some 20 years.
    {
        RestoreSession();
        LoginObject model = new LoginObject();
        LoginView view = new LoginView(model, this.Request, CurrentSession);
        // return View["login.pt", testModel]; // Kept for history.
        // This, by the way, is the correct way to render from a template.
        return Render("login.pt", context: model, view: view);
    };

    // Receive user data from the registration form
    Post["/login"] = parameters =>
    {
        RestoreSession();
        LoginObject model = new LoginObject();
        LoginView view = new LoginView(model, this.Request, CurrentSession);
        Response response = null;
        bool res = view.Process();
        CurrentSession = view.Session; // Refresh the session
        if (res)
        {
            response = Response.AsRedirect("/");
        }
        else // Authentication failed
        {
            response = Response.AsRedirect("/login");
        }
        // Redirect the browser to the home page.
        return InSession(response);
    };

    Get["/logout"] = parameters => // This page has not been needed for some 20 years.
    {
        RestoreSession();
        LoginObject model = new LoginObject();
        LoginView view = new LoginView(model, this.Request, CurrentSession);
        // return View["login.pt", testModel]; // Kept for history.
        // This, by the way, is the correct way to render from a template.
        view.Logout();
        CurrentSession = view.Session;
        return Render("login.pt", context: model, view: view);
    };

    Post["/clustering"] = parameters =>
    {
        RestoreSession();
        Response response = null;
        int num = 0;
        try
        {
            num = int.Parse(this.Request.Form.max);
            int clnum = 5;
            FlatClusterAnalyzer a = FlatClusterAnalyzer.AnalyzeFlatWithCluster(num);
            a.Store(clnum);
            CurrentSession["message"] = info("Обработано для " + num + " квартир, " + clnum + " кластеров", msg: "Успешный запуск");
            CurrentSession["analysis_data"] = a;
        }
        catch (FormatException)
        {
            CurrentSession["message"] = error("Неправильное число квартир", msg: "Неуспешный запуск");
        }
        response = Response.AsRedirect("/analysis");
        return InSession(response);
    };

    Get["/analysis"] = parameters =>
    {
        RestoreSession();
        ClusterList model = new ClusterList();
        ClusterListView view = new ClusterListView(model);
        return Render("clusters.pt", context: model, view: view);
    };

    Post["/analysis"] = parameters =>
    {
        RestoreSession();
        ClusterList model = new ClusterList();
        ClusterListView view = new ClusterListView(model);
        FlatClusterAnalyzer analyzer = null;
        var form = this.Request.Form;
        if (form.reconstruct != null)
        {
            try
            {
                analyzer = (FlatClusterAnalyzer)CurrentSession["analysis_data"];
                int k = int.Parse(form.numclusters);
                Console.WriteLine("---> K=" + k);
                analyzer.Store(k);
                CurrentSession["message"] = info("Произведена перестройка кластера", msg: "Удачное завершение операции");
            }
            catch
            {
                // No cluster data in the session.
                CurrentSession["message"] = error("Похоже кластер не рассчитан", msg: "Неудачная операция");
            }
        }
        return InSession(Response.AsRedirect("/analysis"));
    };
}
// Perf? Do it by pictures first and then by zoom level?
// The assumption is that the d
public void GenerateClusteringData(IEnumerable items)
{
    this.cachedConvertedObjects.Clear();
    this.keyToItems.Clear();
    this.zoomLevelToClusters.Clear();
    this.LeastClustersZoomLevel = 1;
    this.MostClustersZoomLevel = 1;
    CreateValidItemsAsClusters(items);

    // Zoom level 1 is space view, 20 is ground view
    for (int i = 1; i <= this.numZoomLevels; i++)
    {
        // List of clusters for this zoom level
        ClusterList clusters = new ClusterList();
        foreach (var item in items)
        {
            BasicGeoposition itemCoord = ConvertObjToGPS(item);
            if (!GeospatialHelperStatic.IsValidGPS(itemCoord))
            {
                continue;
            }

            bool addedToCluster = false;
            for (int j = 0; j < clusters.Count; j++)
            {
                if (IsWithinBoundary(itemCoord, clusters[j].Location, i))
                {
                    clusters[j].Count += 1;
                    clusters[j].Objects.Add(item);
                    string id = String.Format("ZL{0}_C{1}", i, j);
                    this.keyToItems[id].Add(item);
                    addedToCluster = true;
                    break;
                }
            }

            if (addedToCluster == false)
            {
                string id = String.Format("ZL{0}_C{1}", i, clusters.Count);
                List<Object> singleItem = new List<Object>() { item };
                clusters.Add(new Cluster() { ClusterId = id, Objects = singleItem, Count = 1, Location = itemCoord });
                List<Object> otherSingleItem = new List<Object>() { item };
                this.keyToItems.Add(id, otherSingleItem);
            }
        }

        if (this.zoomLevelToClusters.Count > 1)
        {
            int minClusters = this.zoomLevelToClusters[this.LeastClustersZoomLevel - 1].Count;
            int maxClusters = this.zoomLevelToClusters[this.MostClustersZoomLevel - 1].Count;

            // eg: 1 1 1 3 5 5 7 7 7, this will do the 1s. See definition for what these two are
            if (clusters.Count <= minClusters)
            {
                this.LeastClustersZoomLevel = i;
            }
            if (clusters.Count > maxClusters)
            {
                this.MostClustersZoomLevel = i;
            }
        }

        this.zoomLevelToClusters.Add(clusters);
    }

    RecalculateCenters();
}
public static void Main(String[] args)
{
    //String A = "ACBDEA";
    //String B = "ABCDA";
    //int[] Aint = { 12, 17, 15, 11, 6, 1, 2, 5, 5, 7, 11, 10, 12, 12, 15, 15, 17, -17, -18, 17, 17, 16, 13, 9, 6, 4, 4, 7, 9, 9, 9, 8, 14, 9, -1, 9, -16, 3, 3, 8, -11, -8, 9, -10, 9, -3, -9, -1, 3, -8, 1 };
    //int[] Bint = { 11, 11, 10, 9, 7, 4, 4, 3, 3, 4, 4, 3, 4, 1, -3, -3, 1, 5, 10, 16, 17, -18, 14, 9, 0, -2, -3, -2, -1, -1, -1, 0, 0, 0, 1, 1, 1, 2, 3, 4, 6, 8, 10, 11, 10, 10, 10, 8, 10, 10, 8, 8, 9, 8, 6, 8, 9, -18, 14, -9 };
    //int tempResult = findInt(Aint, Bint);

    //UNNAMED_01_1851,0,-1,0,1,-1,0,0,2,4,7,9,9,10,16,
    List<SortedSet<string>> resultSet = new List<SortedSet<string>>();

    using (TextFieldParser parser = new TextFieldParser(@"C:\Users\User\Desktop\HurricaneProject\sequences.txt"))
    {
        parser.TextFieldType = Microsoft.VisualBasic.FileIO.FieldType.Delimited;
        parser.SetDelimiters(",");

        Dictionary<string, int[]> sequenceDict = new Dictionary<string, int[]>();
        while (!parser.EndOfData)
        {
            // Process one row: a hurricane name followed by its bearing sequence
            string[] fields = parser.ReadFields();
            string name = fields[0];
            int length = fields.Length - 1;
            while (fields[length] == "")
            {
                length--;
            }
            int[] tempInt = new int[length];
            for (int i = 1; i <= length; i++)
            {
                tempInt[i - 1] = Int32.Parse(fields[i]);
            }
            sequenceDict.Add(name, tempInt);
        }

        /* int[] Aint = sequenceDict["UNNAMED_05_1864"];
         * int[] Bint = sequenceDict["HUMBERTO_09_2007"];
         *
         * string lcsMatch;
         * int lcs = Hurricane.findInt(Aint, Bint, out lcsMatch); */

        StreamWriter logFile = new StreamWriter("C:\\Users\\User\\Desktop\\HurricaneProject\\ClusterLog.txt", false);

        ClusterList fullSet = new ClusterList();

        // Build the initial set of clusters: one cluster per hurricane sequence
        for (int i = 0; i < sequenceDict.Keys.Count; i++)
        {
            //if (i > 200) break;
            string nameA = sequenceDict.Keys.ElementAt(i);
            int[] intA = sequenceDict[sequenceDict.Keys.ElementAt(i)];

            Hurricane hur = new Hurricane();
            hur.id = nameA;
            hur.bearing = intA;

            Cluster cluster = new Cluster();
            cluster.id = i;
            cluster.set.Add(hur);
            fullSet.clusters.Add(cluster);
        }

        // Test print matrix
        Console.WriteLine("Iter 0 (full matrix)");
        logFile.WriteLine("Iter 0 (full matrix)");
        fullSet.Print(logFile);
        //fullSet.PrintMatrix(logFile);

        // Required minimum clustering score
        int minScoreAllowed = 5;

        // Begin clustering on the full list
        ClusterList curCluster = fullSet;

        // Iterate
        for (int i = 1; ; i++)
        {
            // Try to find two clusters to combine
            int lcsScore;
            ClusterList newCluster = curCluster.ConsolidateLCS(minScoreAllowed, out lcsScore);

            if (newCluster == curCluster)
            {
                // No remaining pair matches to within minScoreAllowed: finish up
                logFile.WriteLine("Clustering completed sorting by cluster size");
                curCluster.SortByCount();
                curCluster.Print(logFile, true);

                // Write the cluster groups
                StreamWriter grpFile = new StreamWriter("C:\\Users\\User\\Desktop\\HurricaneProject\\ClusterGrps.txt", false);
                curCluster.WriteIds(grpFile);
                grpFile.Close();
                break;
            }
            else
            {
                // Accept the new cluster list as the current one
                curCluster = newCluster;
            }

            // Log progress
            logFile.WriteLine("Iter " + i.ToString() + " clusters " + curCluster.ClusterCount().ToString() + " LCSscore=" + lcsScore.ToString());
            Console.WriteLine("Iter " + i.ToString() + " clusters " + curCluster.ClusterCount().ToString() + " LCSscore=" + lcsScore.ToString());
            curCluster.Print(logFile);

            // Print if reasonable
            //curCluster.PrintMatrix(logFile);
        }

        Console.WriteLine("Done");
        logFile.Close();
    }
}