/// <summary>
/// Runs the full user-clustering pipeline inside a single DAO transaction:
/// records a schedule entry, wipes the previous clustering tables, clusters each
/// sub-category large enough on its own ("large group"), merges the remaining
/// small sub-categories per category and clusters those ("small group"), then
/// writes a summary log back to the schedule and commits.
/// </summary>
/// <param name="LoginID">Login of the user who triggered the run; stored on the schedule entry.</param>
/// <returns>
/// The value of <c>checkProcess()</c>: 0 means no other run was in progress and the
/// clustering was performed; any other value means the run was skipped.
/// </returns>
/// <exception cref="Exception">Any failure inside the transaction is rethrown after rollback.</exception>
public int clusteringAllTransac(string LoginID)
{
    Clustering_Users_DAO dao = null;
    ClusterSchedule clusterSchedule = new ClusterSchedule();
    clusterSchedule.ClusterType = ConstantValues.SCHE_CLUSTER_USER;
    clusterSchedule.Log = String.Empty;
    clusterSchedule.LoginID = LoginID;

    int processType = checkProcess();
    if (processType == 0)
    {
        Settings st = getSettings();
        try
        {
            dao = new Clustering_Users_DAO();
            dao.beginTransaction();

            // Add to Schedule
            clusterSchedule.Algorithm = st.cluster_type;
            string log = "Clustering Client\n";
            clusterSchedule.Log = log;
            dao.addNewClusterSchedule(clusterSchedule);

            // Clean data from the previous clustering run
            dao.CLEAN_USER_CENTROID_TBL();
            dao.CLEAN_PARTION_TBL();
            dao.CLEAN_USER_CLUSTER_TBL();
            dao.CLEAN_RATTING_MATRIX();

            List<User_SubCategories> list_User_SubCategories = dao.getAllSubCategories();
            bool existTransac = false;
            List<string> list_User_Categories = get_User_Categories(list_User_SubCategories);

            // One bucket per category collecting the sub-categories too small
            // to be clustered on their own (filled in the large-group pass).
            List<List<User_SubCategories>> list_SMALL_User_SubCategories = new List<List<User_SubCategories>>();
            foreach (var item in list_User_Categories)
                list_SMALL_User_SubCategories.Add(new List<User_SubCategories>());

            int numUser = 0;
            // Distinct items seen across all groups; first-seen value wins.
            Dictionary<string, int> allItem = new Dictionary<string, int>();

            // Merges newly seen items into allItem without overwriting earlier entries.
            void MergeItems(Dictionary<string, int> dic_items)
            {
                foreach (var it in dic_items)
                    if (!allItem.ContainsKey(it.Key))
                        allItem.Add(it.Key, it.Value);
            }

            #region Cluster For LargeGroup
            foreach (User_SubCategories item in list_User_SubCategories)
            {
                string categoryName = item.U_SubCategoryID + "_";
                // Convert Data to matrix
                Dictionary<string, int> dic_users = new Dictionary<string, int>();
                Dictionary<string, int> dic_items = new Dictionary<string, int>();
                // Compute ratting matrix
                List<MatrixItem> lstRMI = dao.computeRattingMatrixItem(item.U_SubCategoryID, st.Alpha);
                double[][] x = Util.toMatrix_MatrixItem(lstRMI, out dic_users, out dic_items);

                // A sub-category is "large" when it has more than U_M/2 users;
                // otherwise it is deferred to its category's merge bucket.
                if (dic_users.Count > st.U_M / 2)
                {
                    numUser += dic_users.Count;
                    MergeItems(dic_items);
                    LoadCluster(dao, st, ref existTransac, item.U_SubCategoryID, categoryName, dic_users, dic_items, x);
                }
                else
                    add_SMALL_User_SubCategories(list_User_Categories, ref list_SMALL_User_SubCategories, item);
            }
            #endregion

            #region Cluster For SmallGroup
            // Drop previous merge-group results before rebuilding them.
            dao.delete_USER_CLUSTER_TBL_2("_MERGE_");
            dao.delete_PARTION_TBL_2("_MERGE_");
            dao.delete_USER_CENTROID_TBL_2("_MERGE_");
            for (int i = 0; i < list_User_Categories.Count; i++)
            {
                string categoryName = list_User_Categories[i] + "_MERGE_";
                // Convert Data to matrix
                Dictionary<string, int> dic_users = new Dictionary<string, int>();
                Dictionary<string, int> dic_items = new Dictionary<string, int>();
                // Compute ratting matrix over all small sub-categories of this category combined
                List<MatrixItem> lstRMI = new List<MatrixItem>();
                foreach (User_SubCategories item in list_SMALL_User_SubCategories[i])
                    lstRMI.AddRange(dao.computeRattingMatrixItem(item.U_SubCategoryID, st.Alpha));
                double[][] x = Util.toMatrix_MatrixItem(lstRMI, out dic_users, out dic_items);

                numUser += dic_users.Count;
                MergeItems(dic_items);
                LoadCluster(dao, st, ref existTransac, list_User_Categories[i], categoryName, dic_users, dic_items, x);
            }
            #endregion

            // Remove Excess USER_CLUSTER_TBL
            dao.removeExcessCluster();

            // Update USE_TBL
            if (existTransac)
                updateTransac_CheckPoint();

            // Build the human-readable run summary stored on the schedule entry.
            log += "\t Number of Client Categories: " + list_User_SubCategories.Count;
            log += "\t Number of Client : " + numUser;
            log += "\t Number of Item : " + allItem.Count;
            log += "\n Parameter: \n";
            log += "\t Algorithm:" + st.cluster_type;
            log += "\t Epsilon :" + st.epsilon;
            log += "\t Max Loop :" + st.maxLoop;

            // Update Success Schedule
            clusterSchedule.Log = log;
            dao.updateClusterSchedule(clusterSchedule);
            dao.commitTransaction();
        }
        catch
        {
            // dao can still be null if its constructor threw; guard so the
            // rollback attempt cannot mask the original exception with an NRE.
            dao?.rollbackTransaction();
            // Bare rethrow preserves the original stack trace ("throw ex" would reset it).
            throw;
        }
    }
    return processType;
}