// Commandline Argument: 0,3,4,5,6 5 16 C:\Users\dilet\Desktop\TwitterDB\RWR_EGO_RESULT.txt C:\Users\dilet\Desktop\TwitterDB\EgoNetwork_Analysis.txt C:\Users\dilet\Desktop\TwitterDB\EgoNetwork_Classification.txt
static void Main(string[] args)
{
    // args[0]: comma-separated indices of the columns used as decision attributes.
    int[] selectedColumns = Array.ConvertAll<string, int>(args[0].Split(','), new Converter<string, int>(int.Parse));
    SortedList columnList = new SortedList(); // <Column Number, Column Name>
    foreach (int columnNumber in selectedColumns)
    {
        columnList.Add(columnNumber, candidateColumns[columnNumber]);
    }

    int nFold = int.Parse(args[1]);
    int classLabelCount = int.Parse(args[2]);
    string rwrResultFilePath = args[3];
    string egoNetworkAnalysisFilePath = args[4];
    string classificationResultFilePath = args[5];

    // Start from a clean result file; each fold appends its classification log to it.
    if (File.Exists(classificationResultFilePath))
        File.Delete(classificationResultFilePath);

    // DataPreprocess: split the input data into K folds for cross validation.
    DataPreprocess dataPreprocess = new DataPreprocess(nFold);
    dataPreprocess.dataSetConfiguration(columnList, rwrResultFilePath, egoNetworkAnalysisFilePath);

    // K-fold cross validation: accumulate per-fold metrics, report averages.
    double sumOfCorrectPredictRatio = 0.0;
    double sumOfLearningError = 0.0;
    double sumOfMAP = 0.0;
    for (int k = 0; k < nFold; k++)
    {
        // Train & test data sets for fold k.
        var dataSets = dataPreprocess.getTrainTestSet(k);
        DataSet trainSet = (DataSet)dataSets.Item1;
        DataSet testSet = (DataSet)dataSets.Item2;

        // Decision tree configuration, learning & prediction.
        Classification classification = new Classification(columnList, classLabelCount);
        sumOfLearningError += classification.learnDecisionTreeModel(trainSet);
        classification.prediction(testSet);

        // Ratio of correctly predicted labels on the test fold.
        sumOfCorrectPredictRatio += testSet.validation();

        // Append this fold's classification result to the output file.
        testSet.logClassificationResult(classificationResultFilePath);

        // MAP of the training fold.
        sumOfMAP += trainSet.MAP();
    }

    Console.WriteLine("Average Learning Error: {0:F15}", sumOfLearningError / nFold);
    Console.WriteLine("Correct Predict Ratio: {0:F15}", sumOfCorrectPredictRatio / nFold);
    Console.WriteLine("Average MAP: {0:F15}", sumOfMAP / nFold);
}
// Commandline Argument: 0,3,4,5,6 5 16 C:\Users\dilet\Desktop\TwitterDB\RWR_EGO_RESULT.txt C:\Users\dilet\Desktop\TwitterDB\EgoNetwork_Analysis.txt C:\Users\dilet\Desktop\TwitterDB\EgoNetwork_Classification.txt
static void Main(string[] args)
{
    // Parse the comma-separated decision-attribute column indices from args[0].
    int[] selectedColumnNumbers = Array.ConvertAll<string, int>(args[0].Split(','), new Converter<string, int>(int.Parse));

    // Map each selected column number to its name: <Column Number, Column Name>.
    SortedList columnList = new SortedList();
    for (int i = 0; i < selectedColumnNumbers.Length; i++)
    {
        columnList.Add(selectedColumnNumbers[i], candidateColumns[selectedColumnNumbers[i]]);
    }

    int nFold = int.Parse(args[1]);
    int classLabelCount = int.Parse(args[2]);
    string rwrResultFilePath = args[3];
    string egoNetworkAnalysisFilePath = args[4];
    string classificationResultFilePath = args[5];

    // Remove any stale result file; each fold appends to it below.
    if (File.Exists(classificationResultFilePath))
    {
        File.Delete(classificationResultFilePath);
    }

    // DataPreprocess: split the data into K folds.
    DataPreprocess dataPreprocess = new DataPreprocess(nFold);
    dataPreprocess.dataSetConfiguration(columnList, rwrResultFilePath, egoNetworkAnalysisFilePath);

    // Accumulators for the K-fold cross-validation metrics.
    double sumOfCorrectPredictRatio = 0.0;
    double sumOfLearningError = 0.0;
    double sumOfMAP = 0.0;

    int fold = 0;
    while (fold < nFold)
    {
        // Train & test data sets for this fold.
        var dataSets = dataPreprocess.getTrainTestSet(fold);
        DataSet trainSet = (DataSet)dataSets.Item1;
        DataSet testSet = (DataSet)dataSets.Item2;

        // Decision tree configuration, learning & prediction.
        Classification classification = new Classification(columnList, classLabelCount);
        double learningError = classification.learnDecisionTreeModel(trainSet);
        sumOfLearningError += learningError;
        classification.prediction(testSet);

        // Ratio of correctly predicted labels on the test fold.
        double correctPredictRatio = testSet.validation();
        sumOfCorrectPredictRatio += correctPredictRatio;

        // Append this fold's classification result to the output file.
        testSet.logClassificationResult(classificationResultFilePath);

        // MAP of the training fold.
        sumOfMAP += trainSet.MAP();
        fold++;
    }

    double averageCorrectPredictRatio = sumOfCorrectPredictRatio / nFold;
    double averageLearningError = sumOfLearningError / nFold;
    double averageMAP = sumOfMAP / nFold;
    Console.WriteLine("Average Learning Error: {0:F15}", averageLearningError);
    Console.WriteLine("Correct Predict Ratio: {0:F15}", averageCorrectPredictRatio);
    Console.WriteLine("Average MAP: {0:F15}", averageMAP);
}
// Commandline Argument: C:\Users\dilet\Desktop\TwitterDB C:\Users\dilet\Desktop\TwitterDB\EgoNetwork_Analysis.txt 8192 5 16
static void Main(string[] args)
{
    string dirPath = args[0];

    // Experiment environment settings are loop-invariant: parse them once,
    // not once per RWR file (original re-parsed args inside the foreach).
    string egoNetworkAnalysisFilePath = args[1];
    int combinationCount = int.Parse(args[2]);
    // Number of bits needed to encode every combination index below combinationCount.
    int padding = (int)Math.Ceiling(Math.Log(combinationCount, 2.0));
    int nFold = int.Parse(args[3]);
    int classCount = int.Parse(args[4]);

    string[] rwrFileCollection = Directory.GetFiles(dirPath, "RWR_EGO_RESULT*.txt");
    foreach (string rwrFilePath in rwrFileCollection)
    {
        // One classification log per RWR result file; Path.Combine instead of
        // manual separator concatenation.
        string classificationFilePath = Path.Combine(
            dirPath, Path.GetFileNameWithoutExtension(rwrFilePath) + "_CLASSIFICATION.txt");
        Console.WriteLine(classificationFilePath);
        if (File.Exists(classificationFilePath))
            File.Delete(classificationFilePath);

        using (StreamWriter classificationLogger = new StreamWriter(classificationFilePath))
        {
            // Each combination index is a bitmask selecting a subset of candidate columns.
            for (int combination = 0; combination < combinationCount; combination++)
            {
                string s = Convert.ToString(combination, 2);
                int[] combinationBitArray = s.PadLeft(padding, '0') // zero-pad on the left to fixed width
                    .Select(c => int.Parse(c.ToString()))           // convert each char to 0 or 1
                    .ToArray();                                     // materialize the bit array

                SortedList columnList = new SortedList(); // <Column Number, Column Name>
                for (int i = 0; i < combinationBitArray.Length; i++)
                {
                    if (combinationBitArray[i] == 1)
                        columnList.Add(i, candidateColumns[i]);
                }

                // DataPreprocess: split data into K folds.
                DataPreprocess dataPreprocess = new DataPreprocess(nFold);
                dataPreprocess.dataSetConfiguration(columnList, rwrFilePath, egoNetworkAnalysisFilePath);

                // K-fold cross validation.
                double sumOfCorrectPredictRatio = 0.0;
                double sumOfLearningError = 0.0;
                double sumOfMAP = 0.0;
                for (int k = 0; k < nFold; k++)
                {
                    // Train & test data sets for fold k.
                    var dataSets = dataPreprocess.getTrainTestSet(k);
                    DataSet trainSet = (DataSet)dataSets.Item1;
                    DataSet testSet = (DataSet)dataSets.Item2;

                    // Decision tree configuration, learning & prediction.
                    Classification classification = new Classification(columnList, classCount);
                    sumOfLearningError += classification.learnDecisionTreeModel(trainSet);
                    classification.prediction(testSet);

                    // Ratio of correctly predicted labels on the test fold.
                    sumOfCorrectPredictRatio += testSet.validation();

                    // MAP of the training fold.
                    sumOfMAP += trainSet.MAP();
                }

                double averageMAP = sumOfMAP / nFold;
                double averageCorrectPredictRatio = sumOfCorrectPredictRatio / nFold;
                double averageLearning = sumOfLearningError / nFold;
                // Columns: combination id, avg MAP, avg accuracy, (1 - avg learning error).
                classificationLogger.WriteLine("{0}\t{1:F15}\t{2:F15}\t{3:F15}",
                    combination, averageMAP, averageCorrectPredictRatio, 1.0 - averageLearning);
            }
        }
    }
}
// Commandline Argument: C:\Users\dilet\Desktop\TwitterDB C:\Users\dilet\Desktop\TwitterDB\EgoNetwork_Analysis.txt 8192 5 16
static void Main(string[] args)
{
    string dirPath = args[0];

    // Experiment environment settings do not depend on the current RWR file,
    // so parse them once up front (original re-parsed args on every iteration).
    string egoNetworkAnalysisFilePath = args[1];
    int combinationCount = int.Parse(args[2]);
    // Bit width required to represent every combination index below combinationCount.
    int padding = (int)Math.Ceiling(Math.Log(combinationCount, 2.0));
    int nFold = int.Parse(args[3]);
    int classCount = int.Parse(args[4]);

    string[] rwrFileCollection = Directory.GetFiles(dirPath, "RWR_EGO_RESULT*.txt");
    foreach (string rwrFilePath in rwrFileCollection)
    {
        // One classification log per RWR result file; Path.Combine handles the
        // separator instead of manual concatenation.
        string classificationFilePath = Path.Combine(
            dirPath, Path.GetFileNameWithoutExtension(rwrFilePath) + "_CLASSIFICATION.txt");
        Console.WriteLine(classificationFilePath);
        if (File.Exists(classificationFilePath))
        {
            File.Delete(classificationFilePath);
        }

        using (StreamWriter classificationLogger = new StreamWriter(classificationFilePath))
        {
            // Treat each combination index as a bitmask over the candidate columns.
            for (int combination = 0; combination < combinationCount; combination++)
            {
                string s = Convert.ToString(combination, 2);
                int[] combinationBitArray = s.PadLeft(padding, '0') // zero-pad on the left to fixed width
                    .Select(c => int.Parse(c.ToString()))           // convert each char to 0 or 1
                    .ToArray();                                     // materialize the bit array

                SortedList columnList = new SortedList(); // <Column Number, Column Name>
                for (int i = 0; i < combinationBitArray.Length; i++)
                {
                    if (combinationBitArray[i] == 1)
                    {
                        columnList.Add(i, candidateColumns[i]);
                    }
                }

                // DataPreprocess: split data into K folds.
                DataPreprocess dataPreprocess = new DataPreprocess(nFold);
                dataPreprocess.dataSetConfiguration(columnList, rwrFilePath, egoNetworkAnalysisFilePath);

                // K-fold cross validation.
                double sumOfCorrectPredictRatio = 0.0;
                double sumOfLearningError = 0.0;
                double sumOfMAP = 0.0;
                for (int k = 0; k < nFold; k++)
                {
                    // Train & test data sets for fold k.
                    var dataSets = dataPreprocess.getTrainTestSet(k);
                    DataSet trainSet = (DataSet)dataSets.Item1;
                    DataSet testSet = (DataSet)dataSets.Item2;

                    // Decision tree configuration, learning & prediction.
                    Classification classification = new Classification(columnList, classCount);
                    sumOfLearningError += classification.learnDecisionTreeModel(trainSet);
                    classification.prediction(testSet);

                    // Ratio of correctly predicted labels on the test fold.
                    sumOfCorrectPredictRatio += testSet.validation();

                    // MAP of the training fold.
                    sumOfMAP += trainSet.MAP();
                }

                double averageMAP = sumOfMAP / nFold;
                double averageCorrectPredictRatio = sumOfCorrectPredictRatio / nFold;
                double averageLearning = sumOfLearningError / nFold;
                // Columns: combination id, avg MAP, avg accuracy, (1 - avg learning error).
                classificationLogger.WriteLine("{0}\t{1:F15}\t{2:F15}\t{3:F15}",
                    combination, averageMAP, averageCorrectPredictRatio, 1.0 - averageLearning);
            }
        }
    }
}