private void OnClassifyImage(object sender, EventArgs e)
{
    try
    {
        _loggingService.Log("Classify image has started");

        var imagePreprocessor = new ImagePreprocessor();
        double[] pixels = imagePreprocessor.Preprocess(_uploadImageView.Image);

        IPredictionModel predictionModel = Global.PredictionModel;
        double[] prediction = predictionModel.Predict(pixels);

        _uploadImageView.ProcessPrediction(prediction);

        _loggingService.Log("Classify image has completed");
    }
    catch (NullReferenceException exception)
    {
        _loggingService.Log(exception);
        _messageService.ShowMessage("No image was uploaded. Please upload an image and try again.", "Upload error", icon: MessageBoxIcon.Information);
    }
    catch (Exception exception)
    {
        _loggingService.Log(exception);
        _messageService.ShowMessage("An error occurred while classifying the image. Please try again.", "Classification error", icon: MessageBoxIcon.Information);
    }
}
protected virtual void ValidateModel(IPredictionModel model)
{
    if (!(model is IKnnPredictionModel<TPredictionResult>))
    {
        throw new ArgumentException("Invalid prediction model passed for KNN predictor!");
    }
}
public IList<TPredictionResult> Predict(IDataFrame queryDataFrame, IPredictionModel model, int dependentFeatureIndex)
{
    ValidateModel(model);
    var knnModel = model as IKnnPredictionModel<TPredictionResult>;
    var results = new ConcurrentBag<RowIndexDistanceDto<TPredictionResult>>();

    var normalizedData = NormalizeData(queryDataFrame, knnModel, dependentFeatureIndex);
    var normalizedTrainingData = normalizedData.Item1;
    var queryMatrix = normalizedData.Item2;

    Parallel.For(0, queryDataFrame.RowCount, queryRowIdx =>
    {
        var rowVector = queryMatrix.Row(queryRowIdx);
        var distances = new ConcurrentBag<RowIndexDistanceDto<TPredictionResult>>();

        // Compute the distance from the query row to every training row
        for (int trainingRowIdx = 0; trainingRowIdx < normalizedTrainingData.RowCount; trainingRowIdx++)
        {
            var trainingRow = normalizedTrainingData.Row(trainingRowIdx);
            TPredictionResult dependentFeatureValue = knnModel.ExpectedTrainingOutcomes[trainingRowIdx];
            double distance = DistanceMeasure.Distance(rowVector, trainingRow);
            var distanceDto = new RowIndexDistanceDto<TPredictionResult>(trainingRowIdx, distance, dependentFeatureValue);
            distances.Add(distanceDto);
        }

        // Keep the K nearest neighbours and combine their outcomes into a single prediction
        var sortedDistances = distances.OrderBy(distDto => distDto.Distance).Take(knnModel.KNeighbors);
        var result = new RowIndexDistanceDto<TPredictionResult>(queryRowIdx, 0, _resultHandler(sortedDistances, WeightingFunction));
        results.Add(result);
    });

    return results.OrderBy(res => res.RowIndex).Select(res => res.DependentFeatureValue).ToList();
}
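A minimal usage sketch for the KNN predictor above (not part of the listed code): the concrete predictor type, the query data frame, and the trained model object are assumptions made for illustration; only the Predict signature comes from the example.

// Hypothetical usage; SimpleKnnClassificationPredictor, queryDataFrame and trainedKnnModel are assumed to exist.
IPredictor<string> knnPredictor = new SimpleKnnClassificationPredictor<string>();          // hypothetical concrete type
IList<string> predictedLabels = knnPredictor.Predict(queryDataFrame, trainedKnnModel, 4);  // 4 = index of the dependent feature column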
private async void OnRunBenchmark(object sender, EventArgs e)
{
    IPredictionModel predictionModel = Global.PredictionModel;
    if (predictionModel == null)
    {
        _messageService.ShowMessage("The prediction model must be loaded first.", "Prediction model", icon: MessageBoxIcon.Information);
        return;
    }

    _benchmarkView.ResetView();

    _cancellationTokenSource = new CancellationTokenSource();
    CancellationToken token = _cancellationTokenSource.Token;
    token.Register(() => { _benchmarkView.IsBenchmarkRunning = false; });

    try
    {
        await Task.Run(() => { RunBenchmark(predictionModel); }, token);
    }
    catch (Exception exception)
    {
        _loggingService.Log(exception);
        _messageService.ShowMessage("An error occurred while running the benchmark. Please try again.", "Benchmark error", icon: MessageBoxIcon.Information);
    }
}
private void RunBenchmark(IPredictionModel model)
{
    _loggingService.Log("Running benchmark has started");
    _benchmarkView.IsBenchmarkRunning = true;

    // 100 batches of 100 test images each (10,000 images in total)
    var provider = new BatchDataProvider(DirectoryHelper.TestLabelsPath, DirectoryHelper.TestImagesPath, 100);
    var acc = 0;

    for (var i = 0; i < 100; i++)
    {
        if (!_benchmarkView.IsBenchmarkRunning)
        {
            break;
        }

        MnistImageBatch data = provider.GetData();
        int[] predictions = data.Pixels.Select(model.Predict).Select(x => x.ArgMax()).ToArray();

        // The second Where parameter is the element index, so each label is compared
        // against the prediction at the same position
        acc += data.Labels.Where((lbl, idx) => lbl == predictions[idx]).Count();

        _benchmarkView.PerformProgressStep();
        _benchmarkView.DrawGrid(new ImageGridModel(data, predictions));
    }

    _benchmarkView.SetAccuracy(acc);
    _benchmarkView.IsBenchmarkRunning = false;
    _loggingService.Log("Running benchmark has completed");
}
protected override void ValidateModel(IPredictionModel model)
{
    if (!(model is IBackwardsEliminationKnnModel<TPredictionResult>))
    {
        throw new ArgumentException("Invalid model passed to Backwards Elimination KNN Predictor!");
    }
}
public double computeRMSE(Users validationUsers, Items validationItems, IPredictionModel model)
{
    double sse = 0;
    double actualRating;
    double predictedRating;
    int n = 0;

    foreach (User user in validationUsers)
    {
        foreach (string itemId in user.GetRatedItems())
        {
            Item item = validationItems.GetItemById(itemId);
            actualRating = user.GetRating(itemId);
            predictedRating = model.Predict(user, item);
            if (predictedRating == -1) // if the user or the item does not exist, fall back to the average rating
            {
                predictedRating = avgRating;
            }

            sse += Math.Pow(actualRating - predictedRating, 2);
            n++;
        }
    }

    return Math.Sqrt(sse / n);
}
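For reference, the loop above accumulates the usual root-mean-squared error, RMSE = sqrt((1/n) * sum of (actual - predicted)^2). A small worked example with invented numbers (requires System and System.Linq):

double[] actual    = { 4.0, 3.0, 5.0 };
double[] predicted = { 3.5, 3.0, 4.0 };
double sse  = actual.Zip(predicted, (a, p) => Math.Pow(a - p, 2)).Sum();  // 0.25 + 0.0 + 1.0 = 1.25
double rmse = Math.Sqrt(sse / actual.Length);                             // sqrt(1.25 / 3) ≈ 0.645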
public void addModel(RecommenderSystem.PredictionMethod method, IPredictionModel model)
{
    if (!predictionMethodsDictionary.ContainsKey(method))
    {
        predictionMethodsDictionary.Add(method, model);
    }
}
private static IPredictionModel LoadModel()
{
    var loader = new PredictionModelLoader();
    IPredictionModel model = loader.Load();

    return model;
}
public IPredictionModel Load()
{
    if (DialogResult.OK != _openFileDialog.ShowDialog()
        || _openFileDialog.FileNames.Length == 0
        || _openFileDialog.FileNames.Any(string.IsNullOrWhiteSpace))
    {
        return null;
    }

    IPredictionModel model = ClusterPredictionModel.FromFiles(_openFileDialog.FileNames);

    return model;
}
private List<string> GetTopItems(IPredictionModel predictionModel, string sUserId, int cRecommendations)
{
    var currentUser = testUsers.getUserById(sUserId);
    var currentItems = trainUsers.getUserById(sUserId).GetRatedItems(); // if the user also appears in the training set, filter out the items already rated there
    var candidateItems = trainItems.GetAllItemsIds().Except(currentItems); // keep only items the current user has not rated yet
    var candidateItemsDic = candidateItems.ToDictionary(item => item, item => predictionModel.Predict(currentUser, trainItems.GetItemById(item)));
    var orderByPrediction = candidateItemsDic.OrderByDescending(item => item.Value);

    return orderByPrediction.Select(item => item.Key).Take(cRecommendations).ToList();
}
public IList<TPredictionVal> Predict(IDataFrame queryDataFrame, IPredictionModel model, string dependentFeatureName)
{
    if (!(model is IRandomForestModel))
    {
        throw new ArgumentException("Invalid model passed to Random Forest Predictor");
    }

    var randomForestModel = (IRandomForestModel)model;
    var weightedPredictions = new Tuple<IList<TPredictionVal>, double>[randomForestModel.DecisionTrees.Count];

    // Run every tree of the forest against the query data in parallel,
    // weighting each tree by its out-of-bag accuracy (1 - OOB error)
    Parallel.For(
        0,
        weightedPredictions.Length,
        i =>
        {
            var predictions = decisionTreePredictor.Predict(
                queryDataFrame,
                randomForestModel.DecisionTrees[i],
                dependentFeatureName);
            var weight = 1 - randomForestModel.OutOfBagErrors[i];
            weightedPredictions[i] = new Tuple<IList<TPredictionVal>, double>(predictions, weight);
        });

    // Tally the (optionally weighted) votes for each query row
    var predictionVotes = new Dictionary<int, IDictionary<TPredictionVal, double>>();

    foreach (var weightedPrediction in weightedPredictions)
    {
        for (int rowIdx = 0; rowIdx < queryDataFrame.RowCount; rowIdx++)
        {
            var predictedVal = weightedPrediction.Item1[rowIdx];
            var weight = useVotingWeightedByOob ? weightedPrediction.Item2 : 1.0;

            if (!predictionVotes.ContainsKey(rowIdx))
            {
                predictionVotes.Add(rowIdx, new Dictionary<TPredictionVal, double>());
            }

            if (!predictionVotes[rowIdx].ContainsKey(predictedVal))
            {
                predictionVotes[rowIdx].Add(predictedVal, 0.0);
            }

            predictionVotes[rowIdx][predictedVal] += weight;
        }
    }

    // For each row, pick the class with the highest accumulated vote weight
    var results = predictionVotes.Select(
        rowVotes => rowVotes.Value.OrderByDescending(classVotes => classVotes.Value).First().Key)
        .ToList();

    return results;
}
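A short note on the voting step above (wording ours, not the author's): when useVotingWeightedByOob is set, each tree t votes with weight w_t = 1 - OutOfBagErrors[t], and the predicted class for a row is the one with the largest summed weight. A toy illustration with invented numbers:

// Tree 1 (OOB error 0.10) and tree 2 (OOB error 0.40) predict "A"; tree 3 (OOB error 0.05) predicts "B":
//   vote("A") = 0.90 + 0.60 = 1.50,   vote("B") = 0.95   ->   "A" is returned for that row.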
private void OnClassifyDrawing(object sender, EventArgs e)
{
    try
    {
        _loggingService.Log("Classify drawing has started");

        var imagePreprocessor = new ImagePreprocessor();
        IPredictionModel predictionModel = Global.PredictionModel;
        Image img = _slidingWindowView.Drawing;

        foreach (Size windowSize in WindowSizes)
        {
            foreach (BoundingBox boundingBox in ImageUtilities.SlidingWindow(img, windowSize, 112))
            {
                try
                {
                    double[] pixels = imagePreprocessor.Preprocess(boundingBox.Image);
                    double[] prediction = predictionModel.Predict(pixels);

                    // If the classification confidence is at least 95%, draw a bounding box at this location
                    int predicted = prediction.ArgMax();
                    double predictedAccuracy = prediction[prediction.ArgMax()];
                    if (predictedAccuracy >= 0.95)
                    {
                        _slidingWindowView.DrawBoundingBox(boundingBox, predicted, predictedAccuracy);
                    }
                }
                catch (Exception exception)
                {
                    _loggingService.Log(exception);
                }
            }
        }

        _loggingService.Log("Classify drawing has completed");
    }
    catch (Exception exception)
    {
        _loggingService.Log(exception);
        _messageService.ShowMessage("An error occurred while classifying the drawing. Please try again.", "Classification error", icon: MessageBoxIcon.Information);
    }
}
public Dictionary<PredictionMethod, double> ComputeRMSE(List<PredictionMethod> lMethods, int cTrials = 0)
{
    Console.WriteLine("***************** Model Evaluation *********************");
    Dictionary<RecommenderSystem.PredictionMethod, double> results = new Dictionary<PredictionMethod, double>();

    foreach (var method in lMethods)
    {
        IPredictionModel model = predictionEngine.getModel(method);
        if (model != null)
        {
            var rmse = evaluationEngine.computeRMSE(testUsers, testItems, model);
            Console.WriteLine(String.Format("Model: {0}, RMSE: {1}", method, rmse));
            results.Add(method, rmse);
        }
    }

    Console.WriteLine("*************************************************************");
    return results;
}
public IList<double> Predict(IDataFrame queryDataFrame, IPredictionModel model, string dependentFeatureName)
{
    if (!(model is ILinearRegressionModel))
    {
        throw new ArgumentException("Invalid model passed to Linear Regression predictor!");
    }

    var linearRegressionModel = model as ILinearRegressionModel;
    var xMatrix = queryDataFrame.GetSubsetByColumns(
        queryDataFrame.ColumnNames.Except(new[] { dependentFeatureName }).ToList())
        .GetAsMatrixWithIntercept();
    var results = new List<double>();

    for (int rowIdx = 0; rowIdx < xMatrix.RowCount; rowIdx++)
    {
        var queryRow = xMatrix.Row(rowIdx);
        var result = linearRegressionModel.Weights.DotProduct(queryRow);
        results.Add(result);
    }

    return results;
}
public IList<TDecisionValue> Predict(IDataFrame queryDataFrame, IPredictionModel model, string dependentFeatureName)
{
    if (!(model is IDecisionTreeNode))
    {
        throw new ArgumentException("Invalid model passed to Decision Tree Predictor");
    }

    var results = new ConcurrentBag<Tuple<int, TDecisionValue>>();
    var queryDataFrameWithoutDependentFeature = queryDataFrame.GetSubsetByColumns(
        queryDataFrame.ColumnNames.Except(new[] { dependentFeatureName }).ToList());

    for (int rowIdx = 0; rowIdx < queryDataFrameWithoutDependentFeature.RowCount; rowIdx++)
    {
        IDataVector<TDecisionValue> dataVector = queryDataFrameWithoutDependentFeature.GetRowVector<TDecisionValue>(rowIdx);
        Tuple<TDecisionValue, double> predictionResults = ProcessInstance(dataVector, (IDecisionTreeNode)model, 1.0);
        results.Add(new Tuple<int, TDecisionValue>(rowIdx, predictionResults.Item1));
    }

    return results.OrderBy(tpl => tpl.Item1).Select(tpl => tpl.Item2).ToList();
}
public IList<double> Predict(IDataFrame queryDataFrame, IPredictionModel model, string dependentFeatureName)
{
    if (!(model is ILinearRegressionModel))
    {
        throw new ArgumentException("Invalid model passed to Linear Regression predictor!");
    }

    var linearRegressionModel = model as ILinearRegressionModel;
    var xMatrix = queryDataFrame.GetSubsetByColumns(
        queryDataFrame.ColumnNames.Except(new[] { dependentFeatureName }).ToList())
        .GetAsMatrixWithIntercept();
    var results = new List<double>();

    for (int rowIdx = 0; rowIdx < xMatrix.RowCount; rowIdx++)
    {
        var queryRow = xMatrix.Row(rowIdx);
        var result = linearRegressionModel.Weights.DotProduct(queryRow);
        results.Add(result);
    }

    return results;
}
public IList<IDataQualityReport<TPredictionResult>> CrossValidate(
    IPredictionModelBuilder modelBuilder,
    IModelBuilderParams modelBuilderParams,
    IPredictor<TPredictionResult> predictor,
    IDataQualityMeasure<TPredictionResult> qualityMeasure,
    IDataFrame dataFrame,
    string dependentFeatureName,
    double percentageOfTrainData,
    int folds)
{
    var trainingDataCount = (int)Math.Round(percentageOfTrainData * dataFrame.RowCount);
    var testDataCount = dataFrame.RowCount - trainingDataCount;
    var shuffledAllIndices = dataFrame.RowIndices.Shuffle(_randomizer);
    var maxWindowsCount = dataFrame.RowCount / testDataCount;
    var iterationAccuracies = new List<IDataQualityReport<TPredictionResult>>();
    var currentWindowNo = 0;

    for (var i = 0; i < folds; i++)
    {
        // Once every disjoint test window has been used, reshuffle and start over
        if (currentWindowNo == maxWindowsCount)
        {
            currentWindowNo = 0;
            shuffledAllIndices = shuffledAllIndices.Shuffle();
        }

        var offset = currentWindowNo * testDataCount;
        var trainingIndices = shuffledAllIndices.Skip(offset).Take(trainingDataCount).ToList();
        var trainingData = dataFrame.GetSubsetByRows(trainingIndices);
        var testIndices = shuffledAllIndices.Except(trainingIndices).ToList();
        var testData = dataFrame.GetSubsetByRows(testIndices);

        // Build the model on the training split and evaluate it on the held-out split
        IPredictionModel model = modelBuilder.BuildModel(trainingData, dependentFeatureName, modelBuilderParams);
        IList<TPredictionResult> predictions = predictor.Predict(testData, model, dependentFeatureName);
        IList<TPredictionResult> expected = testData.GetColumnVector<TPredictionResult>(dependentFeatureName);
        IDataQualityReport<TPredictionResult> qualityReport = qualityMeasure.GetReport(expected, predictions);
        iterationAccuracies.Add(qualityReport);

        currentWindowNo++;
    }

    return iterationAccuracies;
}
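A worked example of the windowing logic above (numbers invented for illustration):

// With dataFrame.RowCount = 1000 and percentageOfTrainData = 0.8:
//   trainingDataCount = 800, testDataCount = 200, maxWindowsCount = 1000 / 200 = 5 disjoint test windows.
// Asking for folds = 10 therefore reshuffles the row indices once, after the first five folds
// have consumed every window.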
public Dictionary<PredictionMethod, double> ComputeRMSE(List<PredictionMethod> lMethods, out Dictionary<PredictionMethod, Dictionary<PredictionMethod, double>> dConfidence)
{
    // compute RMSE
    Dictionary<PredictionMethod, double> results = new Dictionary<PredictionMethod, double>();
    foreach (var method in lMethods)
    {
        IPredictionModel model = predictionEngine.getModel(method);
        if (model != null)
        {
            var rmse = evaluationEngine.computeRMSE(testUsers, testItems, model);
            results.Add(method, rmse);
        }
    }

    // compute dConfidence
    dConfidence = new Dictionary<PredictionMethod, Dictionary<PredictionMethod, double>>();
    foreach (var method in lMethods)
    {
        dConfidence.Add(method, new Dictionary<PredictionMethod, double>());
    }

    List<Tuple<PredictionMethod, PredictionMethod>> methodPairs = DataUtils.getAllPairedCombinations(lMethods);
    foreach (var methodPair in methodPairs)
    {
        PredictionMethod method1 = methodPair.Item1;
        PredictionMethod method2 = methodPair.Item2;
        Tuple<double, double> pApB = evaluationEngine.computeConfidence(testUsers, testItems, predictionEngine.getModel(method1), predictionEngine.getModel(method2));
        dConfidence[method1].Add(method2, pApB.Item1);
        dConfidence[method2].Add(method1, pApB.Item2);
    }

    return results;
}
public IList<double> Predict(IDataFrame queryDataFrame, IPredictionModel model, int dependentFeatureIndex)
{
    return Predict(queryDataFrame, model, queryDataFrame.ColumnNames[dependentFeatureIndex]);
}
public BaseModelMethod(IPredictionModel model)
{
    this.model = (MatrixFactorizationModel)model;
}
public IList<TDecisionValue> Predict(IDataFrame queryDataFrame, IPredictionModel model, int dependentFeatureIndex)
{
    return this.Predict(queryDataFrame, model, queryDataFrame.ColumnNames[dependentFeatureIndex]);
}
public IList<double> Predict(IDataFrame queryDataFrame, IPredictionModel model, int dependentFeatureIndex)
{
    return Predict(queryDataFrame, model, queryDataFrame.ColumnNames[dependentFeatureIndex]);
}
public IList<TPredictionResult> Predict(IDataFrame queryDataFrame, IPredictionModel model, string dependentFeatureName)
{
    return Predict(queryDataFrame, model, queryDataFrame.ColumnNames.IndexOf(dependentFeatureName));
}
public Tuple<double, double> computeConfidence(Users validationUsers, Items validationItems, IPredictionModel modelA, IPredictionModel modelB)
{
    double aCounter = 0;
    double bCounter = 0;
    double aPrediction;
    double bPrediction;
    double aError;
    double bError;
    double actualRating;

    // Count the number of wins for each model (ties count as half a win for both)
    foreach (User user in validationUsers)
    {
        foreach (string itemId in user.GetRatedItems())
        {
            Item item = validationItems.GetItemById(itemId);
            actualRating = user.GetRating(itemId);
            aPrediction = modelA.Predict(user, item);
            bPrediction = modelB.Predict(user, item);
            aError = Math.Abs(actualRating - aPrediction);
            bError = Math.Abs(actualRating - bPrediction);

            if (aError < bError)
            {
                aCounter++;
            }
            else if (aError > bError)
            {
                bCounter++;
            }
            else
            {
                aCounter += 0.5;
                bCounter += 0.5;
            }
        }
    }

    int n = (int)(aCounter + bCounter);

    // calculate pA
    double sum = 0;
    for (int i = (int)aCounter; i < n; i++)
    {
        sum += MathUtils.Factorial(n) / (MathUtils.Factorial(n - i) * MathUtils.Factorial(i));
    }
    double pA = 1 - Math.Pow(0.5, n) * sum;

    // calculate pB
    sum = 0;
    for (int i = (int)bCounter; i < n; i++)
    {
        sum += MathUtils.Factorial(n) / (MathUtils.Factorial(n - i) * MathUtils.Factorial(i));
    }
    double pB = 1 - Math.Pow(0.5, n) * sum;

    return Tuple.Create(pA, pB);
}
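For clarity (not stated in the source), the two loops above evaluate a binomial, sign-test-style tail over the win counts, exactly as written:

// pA = 1 - 0.5^n * sum_{i = aCounter}^{n-1} n! / ((n - i)! * i!)
// and symmetrically for pB, where n = aCounter + bCounter and ties contribute half a win to each model.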