/// <summary>
/// Computes the nearest neighbours of a target user using the Pearson distance.
/// </summary>
/// <param name="UserID">Chosen target user</param>
/// <param name="users">All users (including the target), keyed by user id</param>
/// <param name="treshold">Minimum distance for a candidate to count as a neighbour</param>
/// <returns>Neighbour (user id, distance) pairs in a 2D array, sorted by NeighbourSort</returns>
public static double[,] ComputeNearestNeighbour(int UserID, Dictionary <int, double[, ]> users, double treshold)
{
    // The original assigned null first and aliased the dictionary; both were redundant.
    IDistance iDistance = new Pearson();

    // One row per candidate user (everyone except the target).
    int row = users.Count - 1;
    var distances_neighbours = new double[row, 2];

    // Next free row. The original rescanned the array for a slot whose user id
    // was still 0 on every insert (O(n) per insert, and a slot holding user id 0
    // would be reused); a fill counter is O(1) and unambiguous.
    int next = 0;

    foreach (var item in users)
    {
        if (item.Key.Equals(UserID))
            continue; // never compare the target user with itself

        var distance = iDistance.ComputeDistance(users[UserID], users[item.Key]);

        // Keep only candidates that meet the threshold, up to the array capacity.
        if (distance >= treshold && next < row)
        {
            distances_neighbours[next, 0] = item.Key;
            distances_neighbours[next, 1] = distance;
            next++;
        }
    }

    // Sort the collected neighbours before returning.
    distances_neighbours = NeighbourSort(distances_neighbours);
    return (distances_neighbours);
}
[Test()]
public void TestComputeCorrelations2()
{
    // Read the MovieLens 100k training split from disk.
    var userMapping = new EntityMapping();
    var itemMapping = new EntityMapping();
    var ratings = RatingPrediction.Read("../../../../data/ml100k/u1.base", userMapping, itemMapping);

    // The item-item Pearson correlation with shrinkage 200 must match the reference value.
    Assert.AreEqual(-0.02855815f, Pearson.ComputeCorrelation(ratings, EntityType.ITEM, 45, 311, 200f), 0.00001);
}
/// <summary>
/// Finds up to <paramref name="max"/> nearest neighbours of the target user whose
/// similarity exceeds <paramref name="threshold"/>. Results are accumulated in the
/// <c>neighbours</c> field declared elsewhere in this class.
/// </summary>
/// <param name="ratings">All user ratings, keyed by user id then item id</param>
/// <param name="targetUser">User to find neighbours for</param>
/// <param name="threshold">Minimum similarity (exclusive)</param>
/// <param name="max">Maximum number of neighbours to keep</param>
/// <param name="similarityType">Similarity measure to use</param>
/// <returns>Neighbour user ids mapped to their similarity</returns>
public Dictionary <int, double> FindNearestNeighbour(Dictionary <int, Dictionary <int, double> > ratings, int targetUser, double threshold, int max, Similarity similarityType)
{
    // Direct O(1) dictionary lookup instead of the original O(n) FirstOrDefault scan,
    // which also returned null (and crashed later) for an unknown target user.
    if (!ratings.TryGetValue(targetUser, out var target))
        return neighbours; // unknown target user: nothing to compare against

    // One helper instance for the whole loop (the original allocated one per user).
    var helper = new Helper();

    foreach (var user in ratings)
    {
        if (user.Key == targetUser)
            continue; // skip self-comparison

        // Align both users' ratings into a pair of comparable vectors.
        var vectors = helper.ConvertToVector(user.Value, target, similarityType);

        double similarity = 0;
        switch (similarityType)
        {
            case Similarity.Euclidian:
                similarity = new Euclidian().Calculate(vectors.Item1, vectors.Item2);
                break;
            case Similarity.Pearson:
                similarity = new Pearson().Calculate(vectors.Item1, vectors.Item2);
                break;
            case Similarity.Cosine:
                similarity = new Cosine().Calculate(vectors.Item1, vectors.Item2);
                break;
            default:
                break;
        }

        // A candidate must beat the threshold and have rated items beyond the overlap.
        if (similarity > threshold && HasRatedAdditionalItems(user.Value, target))
        {
            if (neighbours.Count < max)
            {
                neighbours.Add(user.Key, similarity);
            }
            else
            {
                // Neighbour set is full: replace the weakest entry if this one is better.
                var lowest = neighbours.OrderBy(o => o.Value).First();
                if (similarity > lowest.Value)
                {
                    neighbours.Remove(lowest.Key);
                    neighbours.Add(user.Key, similarity);
                }
            }
        }
    }

    return (neighbours);
}
/// <summary>Runs the lesson-one distance/similarity exercises and prints the results.</summary>
private static void ExercisesLessonOne()
{
    // Sample rating vectors, built with collection initializers instead of repeated Add calls.
    Vector x = new Vector(new List <double> { 4, 2 });
    Vector amy = new Vector(new List <double> { 5, 5 });
    Vector clara = new Vector(new List <double> { 4.75, 4.5, 5, 4.25, 4 });
    Vector robert = new Vector(new List <double> { 4, 3, 5, 2, 1 });

    // Second exercise: rating sets of unequal length.
    Vector amyTwo = new Vector(new List <double> { 3.0 });
    Vector xTwo = new Vector(new List <double> { 5.0, 2.5, 2.0 });

    var one = new Euclidian().Calculate(amy, x);
    var two = new Manhattan().Calculate(amy, x);
    var three = new Pearson().Calculate(clara, robert);
    var four = new Cosine().Calculate(clara, robert);
    var five = new Cosine().Calculate(amyTwo, xTwo);

    Console.WriteLine("\nEuclidian between Amy and X = " + one);
    Console.WriteLine("Manhattan between Amy and X = " + two);
    Console.WriteLine("Pearson between Clara and Robert = " + three);
    Console.WriteLine("Cosine between Clara and Robert = " + four);
    Console.WriteLine("Cosine between Amy and X (incomplete) = " + five);
}
/// <summary>Retrain the correlation entries for a given item.</summary>
/// <param name="item_id">The ID of the item to retrain</param>
protected override void RetrainItem(int item_id)
{
    // BUG FIX: the original called base.RetrainUser(item_id) here, passing an
    // item id into the user retraining path. Mirroring the sibling RetrainUser
    // override, the item path must call base.RetrainItem.
    base.RetrainItem(item_id);
    if (UpdateItems)
    {
        // Recompute this item's Pearson correlation against every known item.
        for (int i = 0; i <= MaxItemID; i++)
        {
            correlation[item_id, i] = Pearson.ComputeCorrelation(ratings, EntityType.ITEM, item_id, i, Shrinkage);
        }
    }
}
/// <summary>Retrain the correlation entries for a given user.</summary>
/// <param name="user_id">The ID of the user to retrain</param>
protected override void RetrainUser(int user_id)
{
    base.RetrainUser(user_id);

    if (!UpdateUsers)
        return; // user correlations are frozen

    // Recompute this user's Pearson correlation against every known user.
    for (int other = 0; other <= MaxUserID; other++)
        correlation[user_id, other] = Pearson.ComputeCorrelation(ratings, EntityType.USER, user_id, other, Shrinkage);
}
[Test()]
public void TestComputeCorrelations2()
{
    // Read the MovieLens 100k training split from disk.
    var userMapping = new Mapping();
    var itemMapping = new Mapping();
    var ratings = RatingData.Read("../../../../data/ml-100k/u1.base", userMapping, itemMapping);

    // Item-item Pearson correlation (shrinkage 200) must match the reference value.
    var pearson = new Pearson(ratings.AllUsers.Count, 200f);
    Assert.AreEqual(-0.02788301f, pearson.ComputeCorrelation(ratings, EntityType.ITEM, 45, 311), 0.00001);
}
private static void pearson_05_pdf_test()
//****************************************************************************80
//
//  Purpose:
//
//    PEARSON_05_PDF_TEST exercises PEARSON_05_PDF at a fixed evaluation point.
//
//  Licensing:
//
//    This code is distributed under the GNU LGPL license.
//
//  Modified:
//
//    11 April 2016
//
//  Author:
//
//    John Burkardt
//
{
    // Fixed distribution parameters and evaluation point for the smoke test.
    const double a = 1.0;
    const double b = 2.0;
    const double c = 3.0;
    const double x = 5.0;

    Console.WriteLine("");
    Console.WriteLine("PEARSON_05_PDF");
    Console.WriteLine(" PEARSON_05_PDF evaluates the Pearson 05 PDF.");
    Console.WriteLine("");
    Console.WriteLine(" PDF parameter A = " + a + "");
    Console.WriteLine(" PDF parameter B = " + b + "");
    Console.WriteLine(" PDF parameter C = " + c + "");

    // Bail out if the parameter combination is not legal for Pearson 05.
    if (!Pearson.pearson_05_check(a, b, c))
    {
        Console.WriteLine("");
        Console.WriteLine("PEARSON_05_PDF - Fatal error!");
        Console.WriteLine(" The parameters are not legal.");
        return;
    }

    double value = Pearson.pearson_05_pdf(x, a, b, c);

    Console.WriteLine("");
    Console.WriteLine(" PDF argument X = " + x + "");
    Console.WriteLine(" PDF value = " + value + "");
}
[Test()]
public void TestComputeCorrelation()
{
    // Tiny in-memory rating set: users 0 and 1 overlap only on item 4.
    var ratings = new Ratings();
    ratings.Add(0, 1, 0.3);
    ratings.Add(0, 4, 0.2);
    ratings.Add(1, 2, 0.6);
    ratings.Add(1, 3, 0.4);
    ratings.Add(1, 4, 0.2);

    // With this overlap the expected user-user correlation is 0.
    Assert.AreEqual(0, Pearson.ComputeCorrelation(ratings, EntityType.USER, 0, 1, 0));
}
/// <summary>
/// Assignment 1.1: prints the Pearson coefficient between users 3 and 4.
/// </summary>
/// <param name="ratings">All user ratings, keyed by user id then item id</param>
private static void AssignmentOnedotOne(Dictionary <int, Dictionary <int, double> > ratings)
{
    // Direct O(1) dictionary lookups instead of the original O(n) FirstOrDefault scans.
    var userThree = ratings[3];
    var userFour = ratings[4];

    // Align both users' ratings into a pair of comparable vectors.
    var vectors = new Helper().ConvertToVector(userThree, userFour, Similarity.Pearson);

    // Calculate the Pearson coefficient.
    var result = new Pearson().Calculate(vectors.Item1, vectors.Item2);

    Console.WriteLine("\n\n## Assignment 1.1");
    Console.WriteLine("------------------------------------------");
    Console.WriteLine(String.Format("Pearson coefficient between user {0} and user {1} is {2}", 3, 4, result));
}
[Test()]
public void TestComputeCorrelation()
{
    // Tiny in-memory rating set: users 0 and 1 overlap only on item 4.
    var ratings = new Ratings();
    ratings.Add(0, 1, 0.3f);
    ratings.Add(0, 4, 0.2f);
    ratings.Add(1, 2, 0.6f);
    ratings.Add(1, 3, 0.4f);
    ratings.Add(1, 4, 0.2f);

    // With this overlap the expected user-user correlation is 0 (shrinkage 0).
    var pearson = new Pearson(ratings.AllUsers.Count, 0f);
    Assert.AreEqual(0, pearson.ComputeCorrelation(ratings, EntityType.USER, 0, 1));
}
[Test()]
public void TestCreate()
{
    // Small fixed rating set spread over three users.
    var ratings = new Ratings();
    ratings.Add(0, 1, 0.3);
    ratings.Add(0, 2, 0.6);
    ratings.Add(0, 4, 0.2);
    ratings.Add(1, 3, 0.4);
    ratings.Add(1, 4, 0.2);
    ratings.Add(2, 0, 0.1);
    ratings.Add(2, 1, 0.3);

    // Building the correlation matrix must yield 0 for users 0 and 1.
    var pearson = Pearson.Create(ratings, EntityType.USER, 0f);
    Assert.AreEqual(0, pearson[0, 1]);
}
[Test()]
public void TestComputeCorrelations()
{
    // Correlation object for three users, then a small fixed rating set.
    var pearson = new Pearson(3, 0f);
    var ratingData = new Ratings();
    ratingData.Add(0, 1, 0.3f);
    ratingData.Add(0, 2, 0.6f);
    ratingData.Add(0, 4, 0.2f);
    ratingData.Add(1, 3, 0.4f);
    ratingData.Add(1, 4, 0.2f);
    ratingData.Add(2, 0, 0.1f);
    ratingData.Add(2, 1, 0.3f);

    // Computing all user-user correlations with no shrinkage must give 0 for users 0 and 2.
    pearson.Shrinkage = 0;
    pearson.ComputeCorrelations(ratingData, EntityType.USER);
    Assert.AreEqual(0, pearson[0, 2]);
}
[Test()]
public void TestCreate()
{
    // Small fixed rating set spread over three users.
    var ratings = new Ratings();
    ratings.Add(0, 1, 0.3f);
    ratings.Add(0, 2, 0.6f);
    ratings.Add(0, 4, 0.2f);
    ratings.Add(1, 3, 0.4f);
    ratings.Add(1, 4, 0.2f);
    ratings.Add(2, 0, 0.1f);
    ratings.Add(2, 1, 0.3f);

    // The resulting matrix must be a symmetric 3x3 with zero correlation for users 0 and 1.
    var correlationMatrix = new Pearson(ratings.MaxUserID + 1, 0f);
    correlationMatrix.ComputeCorrelations(ratings, EntityType.USER);
    Assert.AreEqual(3, correlationMatrix.NumberOfRows);
    Assert.IsTrue(correlationMatrix.IsSymmetric);
    Assert.AreEqual(0, correlationMatrix[0, 1]);
}
/// <summary>
/// Entry point: loads the big data set and prints Pearson, Euclidean and Cosine
/// similarity scores for each user against the rest.
/// </summary>
static void Main(string[] args)
{
    // Let the user pick a data set, then load the big data set from disk.
    SelectFile selectFile = new SelectFile();
    selectFile.selectDataSet();
    FileReader newFile = new FileReader();
    Dictionary <int, List <UserPreferences> > dataSet = newFile.selectBigDataset();

    Console.WriteLine("------ Pearson ------");
    Pearson pearsonTest = new Pearson(dataSet);
    for (int i = 1; i < dataSet.Count; i++)
    {
        // FIX: the original line contained corrupted string residue ("******" with
        // unbalanced quotes — a syntax error); reconstructed as the obvious
        // "user vs the rest" report.
        Console.WriteLine("user: " + i + " | vs the rest!: " + pearsonTest.Calculate(i));
    }

    Console.WriteLine("------ Euclidean ------");
    Euclidean euclideanTest = new Euclidean(dataSet);
    for (int i = 1; i < dataSet.Count; i++)
    {
        Console.WriteLine(euclideanTest.Calculate(i));
    }

    Console.WriteLine("------ Cosine ------");
    Cosine cosineTest = new Cosine(dataSet);
    for (int i = 1; i < dataSet.Count; i++)
    {
        Console.WriteLine(cosineTest.Calculate(i));
    }

    // NOTE(review): the loops start at 1 and stop before dataSet.Count — if user ids
    // are 1-based the last user is skipped; confirm the intended range.
    Console.WriteLine("Oopsiepoepsie");
    Console.ReadKey();
}
/// <summary>
/// Entry point: builds two paired value series and prints their Pearson correlation.
/// </summary>
static void Main(string[] args)
{
    // x-series sample values.
    GenericList<double> xx = new GenericList<double>();
    xx.Add(90);
    xx.Add(160);
    xx.Add(280);
    xx.Add(450);
    xx.Add(550);

    // BUG FIX: the original added these values to xx instead of the freshly
    // declared yy, leaving yy empty.
    GenericList<double> yy = new GenericList<double>();
    yy.Add(1);
    yy.Add(2);
    yy.Add(3);
    yy.Add(4);
    yy.Add(50); // NOTE(review): 50 here vs 5 in the y array below — confirm which is intended

    double[] x = { 90, 160, 280, 450, 550 };
    double[] y = { 1, 2, 3, 4, 5 };

    Pearson pearson = new Pearson();
    // BUG FIX: the original call was missing its closing parenthesis (did not compile).
    Console.WriteLine(pearson.Korelacja(x, y));
    Console.ReadKey();
}
/// <summary>
/// Trains the recommender: runs the base training, then builds the full
/// user-user Pearson correlation matrix (with shrinkage) from the ratings.
/// </summary>
public override void Train()
{
    base.Train();
    // Precompute all user-user correlations up front so prediction is a lookup.
    this.correlation = Pearson.Create(ratings, EntityType.USER, Shrinkage);
}
/// <summary>
/// Monte-Carlo sensitivity study of the social cost of carbon (SCC): runs the FUND
/// model repeatedly with random parameter draws, then computes per-parameter
/// correlations and a standardized linear regression against the SCC per welfare
/// configuration and region, and writes everything to CSV files under Output\.
/// Relies on the class-level field monteCarloRuns and on Compute2010CpC.
/// </summary>
public static void Run()
{
    int yearsToAggregate = 300;
    bool expectedUtility = false;

    // Cartesian product of discounting/welfare settings to evaluate.
    double[] prtps = { 0.001, 0.01, 0.03 };
    double[] etas = { 1.0, 1.5, 2.0 };
    WelfareType[] swfs = { WelfareType.Global, WelfareType.Utilitarian, WelfareType.Regional, WelfareType.Tol, WelfareType.Pearce };

    var stopwatch = new Stopwatch();
    stopwatch.Start();

    Console.WriteLine("Computing base CpC");
    var baseCpC = Compute2010CpC();

    Console.WriteLine("Doing a run with monteCarloRuns={0}, yearsToAggregate={1}", monteCarloRuns, yearsToAggregate);

    // One RunConfig per (prtp, eta, swf) combination.
    var runConfigs = (from prtp in prtps
                      from eta in etas
                      from swf in swfs
                      select new RunConfig() { Prtp = prtp, Eta = eta, SWF = swf }).ToArray();

    string[] regions = new string[16];

    var parameterDefinition = new Parameters();
    parameterDefinition.ReadExcelFile(@"Data\Parameter - base.xlsm");

    // Run model once to prep for multi tasking
    {
        var m = new Esmf.Model.ModelTyped <FundWorkflow>();
        m.Run(parameterDefinition.GetBestGuess());
    }

    // Only non-constant double parameters take part in the sensitivity analysis.
    var relevantKeys = new List <ParameterElementKey>();
    foreach (var p in parameterDefinition.GetElements())
    {
        if (p is ParameterElement <double> )
        {
            if (!(p is ParameterElementConstant <double>))
            {
                relevantKeys.Add(p.Key);
            }
        }
    }

    // Result containers, indexed [runConfig, region].
    var yValues = new DoubleArray[runConfigs.Length, 16];
    var correlations = new Dictionary <ParameterElementKey, double> [runConfigs.Length, 16];
    var regressions = new Dictionary <ParameterElementKey, double> [runConfigs.Length, 16];
    var regressionsConfIntervLow = new Dictionary <ParameterElementKey, double> [runConfigs.Length, 16];
    var regressionsConfIntervHigh = new Dictionary <ParameterElementKey, double> [runConfigs.Length, 16];
    var regressionsFStat = new DoubleArray(runConfigs.Length, 16);
    var regressionsPVal = new DoubleArray(runConfigs.Length, 16);
    var regressionsRsq = new DoubleArray(runConfigs.Length, 16);
    var xValues = new DoubleArray(monteCarloRuns, relevantKeys.Count);
    var standardizedXValues = new DoubleArray(monteCarloRuns, relevantKeys.Count);
    var standardizedYValues = new DoubleArray[runConfigs.Length, 16];

    for (int i = 0; i < runConfigs.Length; i++)
    {
        for (int l = 0; l < 16; l++)
        {
            yValues[i, l] = new DoubleArray(monteCarloRuns, 1);
            standardizedYValues[i, l] = new DoubleArray(monteCarloRuns);
            correlations[i, l] = new Dictionary <ParameterElementKey, double>();
            regressions[i, l] = new Dictionary <ParameterElementKey, double>();
            regressionsConfIntervHigh[i, l] = new Dictionary <ParameterElementKey, double>();
            regressionsConfIntervLow[i, l] = new Dictionary <ParameterElementKey, double>();
        }
    }

    var xMeans = new DoubleArray(relevantKeys.Count);
    var xStd = new DoubleArray(relevantKeys.Count);
    var rand = new MersenneTwister();
    int currentRun = 0;

    // Best-guess warm-up run with the full set of welfare specifications.
    {
        var m = new MarginalDamage2()
        {
            EmissionYear = Timestep.FromYear(2010),
            Gas = MarginalGas.C,
            Parameters = parameterDefinition.GetBestGuess(),
            YearsToAggregate = yearsToAggregate,
            GlobalCpCAtBase = baseCpC.Item1,
            ExpectedUtilityMode = expectedUtility,
            AdditionalInitMethod = (Esmf.Model.Model fw) =>
            {
                fw["scenariouncertainty"].Parameters["timeofuncertaintystart"].SetValue(Timestep.FromYear(2010));
            }
        };
        for (int i = 0; i < 16; i++)
        {
            m.RegionalCpCAtBase[i] = baseCpC.Item2[i];
        }
        for (int i = 0; i < runConfigs.Length; i++)
        {
            m.SWFs.Add(new WelfareSpec(runConfigs[i].SWF, runConfigs[i].Prtp, runConfigs[i].Eta));
        }
        m.Start();
    }

    // Monte-Carlo loop: each random parameter draw runs the model once in parallel.
    // Each draw writes to distinct RunId-indexed slots of yValues/xValues, so the
    // workers do not contend on array elements; currentRun is incremented atomically.
    Parallel.ForEach(
        parameterDefinition.GetRandom(rand, monteCarloRuns),
        () =>
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.InvariantCulture;
            Thread.CurrentThread.Priority = ThreadPriority.BelowNormal;
            return (0);
        },
        (pv, pls, dummy) =>
        {
            int tempCurrentCount = Interlocked.Increment(ref currentRun);
            Console.Write("\rRun {0} ", tempCurrentCount);
            var m = new MarginalDamage2()
            {
                EmissionYear = Timestep.FromYear(2010),
                Gas = MarginalGas.C,
                Parameters = pv,
                YearsToAggregate = yearsToAggregate,
                GlobalCpCAtBase = baseCpC.Item1,
                ExpectedUtilityMode = false,
                AdditionalInitMethod = (Esmf.Model.Model fw) =>
                {
                    fw["scenariouncertainty"].Parameters["timeofuncertaintystart"].SetValue(Timestep.FromYear(2010));
                }
            };
            for (int i = 0; i < 16; i++)
            {
                m.RegionalCpCAtBase[i] = baseCpC.Item2[i];
            }
            for (int i = 0; i < runConfigs.Length; i++)
            {
                m.SWFs.Add(new WelfareSpec(runConfigs[i].SWF, runConfigs[i].Prtp, runConfigs[i].Eta));
            }
            m.Start();

            // Collect per-region SCC outputs; RunId is 1-based, arrays are 0-based.
            var dimensions = m.Result1.Dimensions;
            for (int i = 0; i < runConfigs.Length; i++)
            {
                foreach (var r in dimensions.GetValues <Region>())
                {
                    regions[r.Index] = r.ToString();
                    yValues[i, r.Index][(int)pv.RunId - 1] = m.SCCs[i][r];
                }
            }

            // Record the drawn parameter values for this run.
            for (int l = 0; l < relevantKeys.Count; l++)
            {
                var p = relevantKeys[l];
                double val = ((ParameterValueElement <double>)pv.GetElementByKey(p)).Value;
                xValues[pv.RunId.Value - 1, l] = val;
            }
            return (0);
        },
        (dummy) => { });

    Console.WriteLine();
    Console.WriteLine();

    if (!Directory.Exists("Output"))
    {
        Directory.CreateDirectory("Output");
    }

    Console.WriteLine("Write summary");
    // Output summary
    // NOTE(review): the header lists 8 columns while the format string writes 6
    // fields — presumably runConfigs[i].ToString() expands to "Prtp;Eta;Swf"; confirm.
    using (var f = File.CreateText(@"Output\summary.csv"))
    {
        f.WriteLine("Prtp;Eta;Swf;SccRegion;Mean;Variance;StdDev;StandardError");
        for (int i = 0; i < runConfigs.Length; i++)
        {
            for (int l = 0; l < 16; l++)
            {
                double standardError = yValues[i, l].Std() / Math.Sqrt(monteCarloRuns);
                f.WriteLine("{0};{1};{2:f3};{3:f3};{4:f3};{5:f3}", runConfigs[i], regions[l], yValues[i, l].Mean(), yValues[i, l].Var(), yValues[i, l].Std(), standardError);
            }
        }
    }

    Console.WriteLine("Compute correlations");
    // Compute correlations
    for (int k = 0; k < relevantKeys.Count; k++)
    {
        var xSlice = xValues.GetCol(k);
        for (int i = 0; i < runConfigs.Length; i++)
        {
            for (int l = 0; l < 16; l++)
            {
                // Compute correlation between parameter k and the SCC of this config/region.
                var pearson = new Pearson(yValues[i, l], xSlice);
                double corr = (double)pearson.Rho;
                correlations[i, l].Add(relevantKeys[k], corr);
            }
        }
    }

    Console.WriteLine("Compute regressions");
    // Standardize everything (z-scores) so regression coefficients are comparable.
    for (int k = 0; k < relevantKeys.Count; k++)
    {
        var xSlice = xValues.GetCol(k);
        xMeans[k] = xSlice.Mean();
        xStd[k] = xSlice.Std();
    }
    for (int k = 0; k < monteCarloRuns; k++)
    {
        for (int i = 0; i < runConfigs.Length; i++)
        {
            for (int l = 0; l < 16; l++)
            {
                standardizedYValues[i, l][k] = (yValues[i, l][k] - yValues[i, l].Mean()) / yValues[i, l].Std();
            }
        }
        for (int l = 0; l < relevantKeys.Count; l++)
        {
            standardizedXValues[k, l] = (xValues[k, l] - xMeans[l]) / xStd[l];
        }
    }

    // Compute regression (Beta[0] is the intercept, hence the k + 1 offsets below).
    for (int i = 0; i < runConfigs.Length; i++)
    {
        for (int l = 0; l < 16; l++)
        {
            var regress = new Regress(standardizedYValues[i, l], standardizedXValues, 0.1);
            regressionsFStat[i, l] = regress.FStat;
            regressionsPVal[i, l] = regress.PVal;
            regressionsRsq[i, l] = regress.Rsq;
            for (int k = 0; k < relevantKeys.Count; k++)
            {
                regressions[i, l].Add(relevantKeys[k], regress.Beta[k + 1]);
                regressionsConfIntervLow[i, l].Add(relevantKeys[k], regress.BetaInt[k + 1, 0]);
                regressionsConfIntervHigh[i, l].Add(relevantKeys[k], regress.BetaInt[k + 1, 1]);
            }
        }
    }

    Console.WriteLine("Write regression summary");
    // Write regression summaries
    using (var f = File.CreateText(@"Output\regression summary.csv"))
    {
        f.WriteLine("Prtp;Eta;Swf;SccRegion;FStat;PVal;Rsq");
        for (int i = 0; i < runConfigs.Length; i++)
        {
            for (int l = 0; l < 16; l++)
            {
                f.WriteLine("{0};{1};{2:f15};{3:f15};{4:f15}", runConfigs[i], regions[l], regressionsFStat[i, l], regressionsPVal[i, l], regressionsRsq[i, l]);
            }
        }
    }

    Console.WriteLine("Write correlation");
    // Write correlation (parameter name gets region suffixes for dimensional keys).
    using (var f = File.CreateText(@"Output\correlation.csv"))
    {
        f.WriteLine("Prtp;Eta;Swf;SccRegion;Name;Correlation;RegressCoefficient;RegressConfIntLower;RegressConfIntUpper");
        for (int i = 0; i < runConfigs.Length; i++)
        {
            for (int l = 0; l < 16; l++)
            {
                foreach (var key in relevantKeys)
                {
                    string s = key.Name;
                    if (key is ParameterElementKey1Dimensional)
                    {
                        s += "-" + regions[((ParameterElementKey1Dimensional)key).D1];
                    }
                    else if (key is ParameterElementKey2Dimensional)
                    {
                        s += "-" + regions[((ParameterElementKey2Dimensional)key).D1] + "-" + regions[((ParameterElementKey2Dimensional)key).D2];
                    }
                    f.WriteLine("{0};{1};\"{2}\";{3:f10};{4:f10};{5:f10};{6:f10}", runConfigs[i], regions[l], s, correlations[i, l][key], regressions[i, l][key], regressionsConfIntervLow[i, l][key], regressionsConfIntervHigh[i, l][key]);
                }
            }
        }
    }

    Console.WriteLine("Write SCC values");
    using (var f = File.CreateText(@"Output\scc values.csv"))
    {
        f.WriteLine("Prtp;Eta;Swf;SccRegion;RundId;Scc");
        for (int i = 0; i < runConfigs.Length; i++)
        {
            for (int l = 0; l < 16; l++)
            {
                for (int k = 0; k < monteCarloRuns; k++)
                {
                    // NOTE(review): standardError is computed here but never used in this loop.
                    double standardError = yValues[i, l].Std() / Math.Sqrt(monteCarloRuns);
                    f.WriteLine("{0};{1};{2};{3:f14}", runConfigs[i], regions[l], k, yValues[i, l][k]);
                }
            }
        }
    }

    stopwatch.Stop();
    Console.WriteLine(stopwatch.Elapsed);
}
/// <summary>
/// Trains the recommender: runs the base training, builds the item-item Pearson
/// correlation matrix (with shrinkage), and installs a memoized lookup of
/// positively correlated items.
/// </summary>
public override void Train()
{
    base.Train();
    this.correlation = Pearson.Create(ratings, EntityType.ITEM, Shrinkage);
    // Memoize so each item's positively-correlated list is computed at most once.
    this.GetPositivelyCorrelatedEntities = Utils.Memoize <int, IList <int> >(correlation.GetPositivelyCorrelatedEntities);
}
/// <summary>
/// Entry point: prints Pearson correlations between a target user's ratings and
/// each row of a small rating matrix, then pairwise cosine similarities between
/// the matrix columns (items). Removed a large block of commented-out dead code.
/// </summary>
static void Main(string[] args)
{
    // Target user's ratings and the rating matrix (rows = users, columns = items).
    var t = new int[] { 5, 3, 4, 4 };
    var m = new int[][]
    {
        new int[] { 3, 1, 2, 3, 3 },
        new int[] { 4, 3, 4, 3, 5 },
        new int[] { 3, 3, 1, 5, 4 },
        new int[] { 1, 5, 5, 2, 1 }
    };

    // Pearson correlation between the target user and every row.
    // The target has one rating fewer than the rows, hence the t.Length guard.
    for (int i = 0; i < m.Length; i++)
    {
        var user1 = new List <Tuple <int, double> >();
        var user2 = new List <Tuple <int, double> >();
        for (int j = 0; j < m[i].Length; j++)
        {
            if (t.Length > j)
            {
                user1.Add(new Tuple <int, double>(j, (double)t[j]));
            }
            user2.Add(new Tuple <int, double>(j, (double)m[i][j]));
        }
        var coefficient = Pearson.calculate(ListModule.OfSeq(user1), ListModule.OfSeq(user2));
        Console.WriteLine(coefficient);
    }

    // Transpose the matrix so each row of 'ratings' holds all ratings of one item,
    // echoing the transposed values as they are copied.
    var ratings = new double[m[0].Length][];
    for (int i = 0; i < m[0].Length; i++)
    {
        ratings[i] = new double[m.Length];
        for (int j = 0; j < m.Length; j++)
        {
            ratings[i][j] = m[j][i];
            Console.Write(m[j][i]);
        }
        Console.WriteLine();
    }

    // Pairwise cosine similarity between items.
    for (int i = 0; i < ratings.Length; i++)
    {
        for (int j = 0; j < ratings.Length; j++)
        {
            var cosine = Cosine.calculate(ListModule.OfSeq(ratings[i]), ListModule.OfSeq(ratings[j]));
            Console.WriteLine("Between #" + i + " and #" + j + ": " + cosine);
        }
    }

    Console.ReadLine();
}
/// <summary>
/// Command line entry point: computes nearest neighbours of samples against weights
/// using the chosen distance metric and saves the result as CSV.
/// Args: dataFile weightsFile outputFile metric(1=euclidean,2=cosine,3=pearson) numThreads
/// </summary>
static void Main(string[] args)
{
    String dataFilePath = args[0];
    String weightsFilePath = args[1];
    String outputFilePath = args[2];
    int metric = Int32.Parse(args[3]);
    int numThreads = Int32.Parse(args[4]);

    Stopwatch w = new Stopwatch();
    w.Start();
    List <double[]> samples = CSVIO.Load <double>(dataFilePath);
    List <double[]> weights = CSVIO.Load <double>(weightsFilePath);
    w.Stop();
    long loadingMS = w.ElapsedMilliseconds;

    w.Reset();
    w.Start();

    // FIX: the original had a redundant double assignment (distFunc = distFunc = ...).
    // Default is squared Euclidean; metric 2 and 3 override it.
    DistanceFunctions distFunc = new SquareEuclideanDistanceFunction();
    if (metric == 2)
    {
        distFunc = new Cosine();
    }
    if (metric == 3)
    {
        distFunc = new Pearson();
    }

    Console.WriteLine("Using distance function with brute force: {0} and numthreads: {1}", metric, numThreads);

    // Euclidean distance admits a fast KD-tree search; other metrics use brute force.
    NNAlgorithm nnMethod = null;
    if (metric == 1)
    {
        nnMethod = new KDTreeNN(weights, distFunc);
    }
    else
    {
        nnMethod = new BruteForceNN(weights, distFunc);
    }

    List <int[]> nearestNeighbours = NearestNeighbour.GetNearestNeighbours(samples, nnMethod, numThreads);
    w.Stop();
    long vqMS = w.ElapsedMilliseconds;

    w.Reset();
    w.Start();
    CSVIO.Save <int>(outputFilePath, nearestNeighbours);
    w.Stop();
    long savingMS = w.ElapsedMilliseconds;

    Console.WriteLine("Loading Time: {0} NN Time: {1} Saving Time: {2}", loadingMS, vqMS, savingMS);
}
/// <summary>
/// Assignment 1 driver: loads a small user-item data set, prints similarities,
/// nearest neighbours and rating predictions, then repeats the exercise on the
/// MovieLens data set. Removed the commented-out debug dump at the end.
/// </summary>
static void Main(string[] args)
{
    Console.WriteLine("|| Assignment 1: User-Item ||");
    Console.WriteLine();

    var ratings = Parser.Parse(',', "./userItem.data");

    // Pearson similarity over the ratings users 3 and 4 have in common.
    var commonRatings3and4 = CommonRatings.getCommonRatings(ratings[3], ratings[4]);
    var pearson = new Pearson();
    var pearson3and4 = pearson.getSimilarity(commonRatings3and4.Item1, commonRatings3and4.Item2);
    Console.WriteLine("// Pearson coefficient of similarity between users 3 and 4: " + pearson3and4);
    Console.WriteLine();

    // Nearest neighbours of user 7 under each similarity measure (k=3, threshold 0.35).
    Console.WriteLine("// Nearest neighbours and similarities for user 7 (using pearson): ");
    var nearestNeighbourUser7Pearson = new NearestNeighbours(ratings, 7, new Pearson(), 3, 0.35);
    foreach (var nearestNeighbor in nearestNeighbourUser7Pearson.nearestNeighbours.OrderByDescending(x => x.Value))
    {
        Console.WriteLine("User " + nearestNeighbor.Key + " = Similarity: " + nearestNeighbor.Value);
    }
    Console.WriteLine("// Nearest neighbours and similarities for user 7 (using cosine): ");
    var nearestNeighbourUser7Cosine = new NearestNeighbours(ratings, 7, new Cosine(), 3, 0.35);
    foreach (var nearestNeighbor in nearestNeighbourUser7Cosine.nearestNeighbours.OrderByDescending(x => x.Value))
    {
        Console.WriteLine("User " + nearestNeighbor.Key + " = Similarity: " + nearestNeighbor.Value);
    }
    Console.WriteLine("// Nearest neighbours and similarities for user 7 (using euclidean): ");
    var nearestNeighbourUser7Euclidean = new NearestNeighbours(ratings, 7, new Euclidean(), 3, 0.35);
    foreach (var nearestNeighbor in nearestNeighbourUser7Euclidean.nearestNeighbours.OrderByDescending(x => x.Value))
    {
        Console.WriteLine("User " + nearestNeighbor.Key + " = Similarity: " + nearestNeighbor.Value);
    }
    Console.WriteLine();

    // Rating predictions for user 7 based on the Pearson neighbours.
    Console.WriteLine("// Predicted ratings for user 7 (using pearson): ");
    var predictedRatingUser7item101 = Prediction.getPrediction(ratings, nearestNeighbourUser7Pearson.nearestNeighbours, 7, 101);
    Console.WriteLine("Item 101 has a predicted rating of: " + predictedRatingUser7item101);
    var predictedRatingUser7item103 = Prediction.getPrediction(ratings, nearestNeighbourUser7Pearson.nearestNeighbours, 7, 103);
    Console.WriteLine("Item 103 has a predicted rating of: " + predictedRatingUser7item103);
    var predictedRatingUser7item106 = Prediction.getPrediction(ratings, nearestNeighbourUser7Pearson.nearestNeighbours, 7, 106);
    Console.WriteLine("Item 106 has a predicted rating of: " + predictedRatingUser7item106);
    Console.WriteLine();

    Console.WriteLine("// Predicted Rating for user 4 (using pearson): ");
    var nearestNeighbourUser4Pearson = new NearestNeighbours(ratings, 4, new Pearson(), 3, 0.35);
    var predictedRatingUser4item101 = Prediction.getPrediction(ratings, nearestNeighbourUser4Pearson.nearestNeighbours, 4, 101);
    Console.WriteLine("Item 101 has a predicted rating of: " + predictedRatingUser4item101);
    Console.WriteLine();

    // Sensitivity check: change user 7's rating of item 106 and re-predict.
    Console.WriteLine("// Adjusted 106 Rating to 2.8 for user 7 (using pearson) gives predictions: ");
    ratings[7][106] = 2.8;
    var nearestNeighbourUser7PearsonAdj1 = new NearestNeighbours(ratings, 7, new Pearson(), 3, 0.35);
    var predictedRatingUser7Adjusted1item101 = Prediction.getPrediction(ratings, nearestNeighbourUser7PearsonAdj1.nearestNeighbours, 7, 101);
    var predictedRatingUser7Adjusted1item103 = Prediction.getPrediction(ratings, nearestNeighbourUser7PearsonAdj1.nearestNeighbours, 7, 103);
    Console.WriteLine("Item 101 has a predicted rating of: " + predictedRatingUser7Adjusted1item101);
    Console.WriteLine("Item 103 has a predicted rating of: " + predictedRatingUser7Adjusted1item103);
    Console.WriteLine();

    Console.WriteLine("// Adjusted 106 Rating to 5 for user 7 (using pearson) gives predictions: ");
    ratings[7][106] = 5;
    var nearestNeighbourUser7PearsonAdj2 = new NearestNeighbours(ratings, 7, new Pearson(), 3, 0.35);
    var predictedRatingUser7Adjusted2item101 = Prediction.getPrediction(ratings, nearestNeighbourUser7PearsonAdj2.nearestNeighbours, 7, 101);
    var predictedRatingUser7Adjusted2item103 = Prediction.getPrediction(ratings, nearestNeighbourUser7PearsonAdj2.nearestNeighbours, 7, 103);
    Console.WriteLine("Item 101 has a predicted rating of: " + predictedRatingUser7Adjusted2item101);
    Console.WriteLine("Item 103 has a predicted rating of: " + predictedRatingUser7Adjusted2item103);
    Console.WriteLine();

    // Repeat with the MovieLens data: top-8 recommendations for user 186
    // (25 Pearson neighbours), then with a minimum of three contributing neighbours.
    var movieRatings = Parser.Parse('\t', "u.data");
    Console.WriteLine("// After importing the movie dataset, the top 8 recommendations for user 186: ");
    var movieUser186Pearson = new NearestNeighbours(movieRatings, 186, new Pearson(), 25, 0.35);
    var movieUser186PearsonTop8 = Prediction.getTopPredictions(movieRatings, movieUser186Pearson.nearestNeighbours, 186, 0, 8);
    foreach (var prediction in movieUser186PearsonTop8)
    {
        Console.WriteLine("Movie: {0} has a rating of {1}", prediction.Key, prediction.Value);
    }
    Console.WriteLine();
    Console.WriteLine("// The top 8 recommendations for user 186 with at least three nearest neighbours: ");
    var movieUser186PearsonTop8Adj = Prediction.getTopPredictions(movieRatings, movieUser186Pearson.nearestNeighbours, 186, 3, 8);
    foreach (var prediction in movieUser186PearsonTop8Adj)
    {
        Console.WriteLine("Movie: {0} has a rating of {1}", prediction.Key, prediction.Value);
    }
    Console.WriteLine();

    Console.ReadLine();
}
/// <summary>
/// Interactive menu: the user picks a similarity algorithm (or recommendation /
/// nearest-neighbour mode) and a data set, then the chosen computation runs.
/// Reads the static fields choice, choiceData and targetUser declared elsewhere.
/// </summary>
public static void PickAlgorithm()
{
    IDistance iDistance = null;
    IReader iReader = null;

    Console.WriteLine("Pick 1 for Euclidean");
    Console.WriteLine("Pick 2 for Pearson");
    Console.WriteLine("Pick 3 for Cosine");
    Console.WriteLine("Pick 4 for Recommendation");
    Console.WriteLine("Pick 5 for NearestNeighbor");
    choice = int.Parse(Console.ReadLine());

    Console.WriteLine("Pick 1 for Basic Dataset");
    Console.WriteLine("Pick 2 for Advanced Dataset");
    choiceData = int.Parse(Console.ReadLine());

    Dictionary <int, double[, ]> dictionaryBasic = new Dictionary <int, double[, ]>();
    Dictionary <int, double[, ]> dictionaryAdvanced = new Dictionary <int, double[, ]>();
    switch (choiceData)
    {
        case 1:
            iReader = new FileReader();
            dictionaryBasic = iReader.GetData();
            break;
        case 2:
            iReader = new DataReader();
            dictionaryAdvanced = iReader.GetData();
            break;
        default:
            Console.WriteLine("Closed");
            Console.ReadLine();
            break;
    }

    switch (choice)
    {
        // FIX for cases 1-3: the original duplicated the identical iDistance
        // construction in both branches of each if/else; the branches differed
        // only in which dictionary was passed, so the construction is hoisted.
        case 1:
            PickTargetUsers();
            Console.WriteLine("You have chosen Euclidian");
            iDistance = new Euclidean();
            IterateSimilarity(choiceData == 1 ? dictionaryBasic : dictionaryAdvanced, targetUser, iDistance);
            break;
        case 2:
            PickTargetUsers();
            Console.WriteLine("You have chosen Pearson");
            iDistance = new Pearson();
            IterateSimilarity(choiceData == 1 ? dictionaryBasic : dictionaryAdvanced, targetUser, iDistance);
            break;
        case 3:
            PickTargetUsers();
            Console.WriteLine("You have chosen Cosine");
            iDistance = new Cosine();
            IterateSimilarity(choiceData == 1 ? dictionaryBasic : dictionaryAdvanced, targetUser, iDistance);
            break;
        case 4:
            Console.WriteLine("Select Targeted User");
            targetUser = int.Parse(Console.ReadLine());
            Console.WriteLine("You have chosen Recommendation");
            Console.WriteLine("Select Top numbers of ranking");
            int k = int.Parse(Console.ReadLine());
            Console.WriteLine("Set up your threshold");
            double threshold = double.Parse(Console.ReadLine());
            if (choiceData == 1)
            {
                Recommender.NearestNeighbour.ComputeRecommendations(targetUser, dictionaryBasic, k, threshold);
            }
            else
            {
                Recommender.NearestNeighbour.ComputeRecommendations(targetUser, dictionaryAdvanced, k, threshold);
            }
            break;
        case 5:
            Console.WriteLine("Select Targeted User");
            targetUser = int.Parse(Console.ReadLine());
            Console.WriteLine("You have chosen NearestNeighbor");
            Console.WriteLine("Select K");
            k = int.Parse(Console.ReadLine());
            Console.WriteLine("Set up your threshold");
            threshold = double.Parse(Console.ReadLine());
            iDistance = new Pearson();
            if (choiceData == 1)
            {
                RecommendNearestNeighbour.Run(threshold, targetUser, iDistance, dictionaryBasic, k);
            }
            else
            {
                // Advanced data set is large: restrict to the first N users by id.
                Console.WriteLine("How many users");
                int amount = int.Parse(Console.ReadLine());
                RecommendNearestNeighbour.Run(threshold, targetUser, iDistance, dictionaryAdvanced.OrderBy(x => x.Key).Take(amount).ToDictionary(pair => pair.Key, pair => pair.Value), k);
            }
            break;
        default:
            Console.WriteLine("Closed");
            Console.ReadLine();
            break;
    }
}
/// <summary>
/// Unity coroutine: periodically correlates the gaze trajectory with each moving
/// scene object (Pearson, per axis), logs the coefficients, and activates the
/// single object whose mean correlation is highest (above half the threshold).
/// Loops until _shouldStop is set; relies on fields declared elsewhere in the class
/// (sceneObjects, gazeTrajectory, correlationWriter, corrWindow, corrFrequency,
/// threshold, w, calcDur, and the _cloningInProgress/_pearsonIsRunning flags).
/// </summary>
IEnumerator CalculatePearson()
{
    while (!_shouldStop)
    {
        _pearsonIsRunning = true;
        // NOTE(review): the new TimeSpan() is immediately overwritten by the next line.
        TimeSpan calcStart = new TimeSpan();
        calcStart = PupilGazeTracker.Instance._globalTime;

        List <MovingObject> _tempObjects = new List <MovingObject>();
        // work with copies to (hopefully) improve performance;
        // _cloningInProgress signals other code not to mutate the lists mid-copy
        _cloningInProgress = true;
        foreach (MovingObject mo in sceneObjects)
        {
            _tempObjects.Add((MovingObject)mo.Clone()); //
        }
        MovingObject _tempGaze = (MovingObject)gazeTrajectory.Clone();
        _cloningInProgress = false;

        // Snapshot the gaze x/y point series once for this pass.
        List <double> _tempXPgaze = new List <double>(_tempGaze.getXPoints());
        List <double> _tempYPgaze = new List <double>(_tempGaze.getYPoints());
        List <float> results = new List <float>();

        foreach (MovingObject mo in _tempObjects)
        {
            // temporary list for not having to generate a new one at every loop
            List <double> _tempXPObj = new List <double>(mo.getXPoints());
            List <double> _tempYPObj = new List <double>(mo.getYPoints());
            // surround calculation with try/catch block or else coroutine will end if something is divided by zero
            try
            {
                // Per-axis Pearson correlation between gaze and object trajectories.
                double coeffX = Pearson.calculatePearson(_tempXPgaze, _tempXPObj);
                double coeffY = Pearson.calculatePearson(_tempYPgaze, _tempYPObj);
                // in cases where an object only moves along one axis, replace NaN with 0
                if (double.IsNaN(coeffX))
                {
                    coeffX = 0;
                }
                if (double.IsNaN(coeffY))
                {
                    coeffY = 0;
                }
                // add result to the original list (the clone's counterpart in sceneObjects)
                results.Add((float)sceneObjects.Find(x => x.Equals(mo)).addSample(calcStart, (coeffX + coeffY) / 2, corrWindow));
                // Log one CSV-ish line per object per pass for offline analysis.
                correlationWriter.WriteLine(mo.name + ";" + calcStart.TotalSeconds + ";" + coeffX + ";" + coeffY + ";" + w + ";" + corrWindow + ";" + corrFrequency + ";" + Coefficient + ";" + Gaze);
            }
            catch (Exception e)
            {
                Debug.LogError("Out of bounds:" + e.StackTrace);
            }
        }

        //activate only one item at a time
        for (int i = 0; i < results.Count; i++)
        {
            // activate the object with the highest correlation value only if it's above pearsonThreshold
            if (results[i].CompareTo(results.Max()) == 0 && results[i] > threshold / 2)
            {
                _tempObjects[i].activate(true); //doesn't matter if original or clone list is used as both refer to the same GameObject
            }
            else
            {
                _tempObjects[i].activate(false);
            }
        }

        // Wait out the remainder of the cadence so passes occur every corrFrequency seconds.
        calcDur = PupilGazeTracker.Instance._globalTime - calcStart;
        yield return (new WaitForSeconds(corrFrequency - (float)calcDur.TotalSeconds)); // calculation should take place every x seconds
    }
}