// Shared noise source. Creating a new Random inside the method meant two calls made
// in quick succession could receive the same time-based seed and therefore produce
// identical "noise" — a serious flaw for a differential-privacy mechanism.
private static readonly Random NoiseGenerator = new Random();

/// <summary>
/// Builds a copy of <paramref name="model"/> whose feature values are perturbed
/// with Laplacian noise derived from <paramref name="epsilon"/> and each
/// feature's range. The input model is not modified.
/// </summary>
/// <param name="model">Source data model supplying inputs, ranges and class labels.</param>
/// <param name="epsilon">Privacy parameter forwarded to CreateLaplacian.</param>
/// <returns>A new model holding the noisy feature vectors and the original classes.</returns>
public static BPMDataModel CreateNoisyModel(BPMDataModel model, double epsilon)
{
    BPMDataModel noisyModel = new BPMDataModel(model.GetInputs().Length, model.GetNoOfFeatures());
    List<double> ranges = model.GetRanges();
    // Scratch buffer reused per vector; Vector.FromArray copies it, so reuse is safe.
    double[] values = new double[model.GetInputs()[0].Count];
    List<Vector> x = new List<Vector>();
    foreach (Vector vector in model.GetInputs())
    {
        for (int i = 0; i < ranges.Count; i++)
        {
            // Uniform draw in (-0.5, 0.5], centred so the Laplacian inversion is symmetric.
            double randomVal = 0.5 - NoiseGenerator.NextDouble();
            values[i] = CreateLaplacian(epsilon, 0, vector[i], ranges[i], randomVal);
        }
        x.Add(Vector.FromArray(values));
    }
    noisyModel.SetAllVectorFeatures(x.ToArray());
    noisyModel.SetClasses(model.GetClasses());
    //noisyModel.CalculateFeatureLimits();
    //noisyModel.ScaleFeatures();
    return noisyModel;
}
/// <summary>
/// Trains the Bayes Point Machine on <paramref name="model"/>, optionally logs the
/// posterior weights, and returns the classification accuracy on the test set.
/// </summary>
/// <param name="model">Training data (feature vectors and class labels).</param>
/// <param name="addBias">Unused here; retained for signature compatibility with callers.</param>
/// <param name="testSet">Feature vectors to classify.</param>
/// <param name="testResults">Expected class index for each test vector.</param>
/// <returns>Accuracy as a percentage in [0, 100].</returns>
private static double RunBPMGeneral(BPMDataModel model, bool addBias, Vector[] testSet, int[] testResults)
{
    int correctCount = 0;
    VectorGaussian[] posteriorWeights = _bpm.Train(model.GetInputs(), model.GetClasses());
    // NOTE(review): index 1 assumes at least two classes — confirm against BPM.Train.
    string actualWeights = posteriorWeights[1].ToString();
    // BUG FIX: IndexOf returns -1 when no line break is present (e.g. Unix newlines),
    // and Substring(0, -1) throws. Only trim when a break is actually found.
    int breakLocation = actualWeights.IndexOfAny(new[] { '\r', '\n' });
    if (breakLocation >= 0)
    {
        actualWeights = actualWeights.Substring(0, breakLocation);
    }
    if (!_onlyWriteAggregateResults && _writeGaussians)
    {
        _writer.WriteLine("Weights= " + actualWeights);
    }
    Discrete[] predictions = _bpm.Test(testSet);
    int i = 0;
    foreach (Discrete prediction in predictions)
    {
        if (FindMaxValPosition(prediction.GetProbs().ToArray()) == testResults[i])
        {
            correctCount++;
        }
        i++;
    }
    // Guard against an empty test set so the accuracy is 0 rather than NaN.
    double accuracy = predictions.Length == 0
        ? 0
        : ((double)correctCount / predictions.Length) * 100;
    //double logEvidence = bpm.GetLogEvidence();
    return accuracy;
}
/// <summary>
/// Handles the Begin/Cancel button. On "Begin": validates the sensitivity range,
/// loads the training and test models, and starts the BackgroundWorker. On
/// "Cancel" (a click while a run is active): requests worker cancellation.
/// </summary>
private void begin_Click(object sender, EventArgs e)
{
    if (_maxSensitivity < _startSensitivity)
    {
        ShowDialog(@"The maximum sensitivity must be greater than or equal to the minimum sensitivity.", "Error", false);
        return;
    }
    if (!_performingCalcs)
    {
        //Disable input changes while the worker runs
        ChangeStatusOfInputs(false);
        beginButton.Text = @"Cancel";
        _bpm = new BPM(_numOfClasses, _noisePrecision);
        try
        {
            _trainingModel = FileUtils.ReadFile(_trainingFilePath, _labelAtStartOfLine, _noOfFeatures, _addBias);
            _testModel = FileUtils.ReadFile(_testFilePath, _labelAtStartOfLine, _noOfFeatures, _addBias);
            _writer = new StreamWriter(_resultsFilePath, _appendToFile);
            _totalRuns = (int)Math.Ceiling(_noOfRuns * (1 + ((_maxSensitivity - _startSensitivity) / _sensitivityIncrement)));
            _last = DateTime.Now;
            bw.WorkerReportsProgress = true;
            bw.WorkerSupportsCancellation = true;
            // BUG FIX: unsubscribe before subscribing — the original attached the
            // handlers again on every click, so after N runs each event fired N times.
            bw.DoWork -= bw_DoWork;
            bw.DoWork += bw_DoWork;
            bw.ProgressChanged -= bw_ProgressChanged;
            bw.ProgressChanged += bw_ProgressChanged;
            bw.RunWorkerCompleted -= bw_RunWorkerCompleted;
            bw.RunWorkerCompleted += bw_RunWorkerCompleted;
            // Configure state BEFORE starting the worker so an early ProgressChanged
            // cannot report against a stale progress-bar Maximum.
            progressBar1.Maximum = _totalRuns;
            _performingCalcs = true;
            _prevRem = _totalRuns;
            bw.RunWorkerAsync();
        }
        catch (Exception exception)
        {
            ShowDialog("Sorry, there was an error reading the input data: " + exception.GetType(), "Error", true);
            beginButton.Text = @"Begin processing";
            ChangeStatusOfInputs(true);
        }
    }
    else
    {
        bw.CancelAsync();
        beginButton.Text = @"Begin processing";
        //Tidy up the status display
        statusLabel.Text = @"";
    }
}
// Shared noise source. The original allocated a new Random per call; on rapid
// successive calls the time-based seed can repeat, yielding identical noise —
// unacceptable for a differential-privacy perturbation.
private static readonly Random _noiseSource = new Random();

/// <summary>
/// Returns a copy of <paramref name="model"/> with every feature value perturbed
/// by Laplacian noise scaled from <paramref name="epsilon"/> and the feature's range.
/// </summary>
/// <param name="model">Source data model; left unmodified.</param>
/// <param name="epsilon">Privacy parameter forwarded to CreateLaplacian.</param>
/// <returns>A new model containing the noisy vectors and the original class labels.</returns>
public static BPMDataModel CreateNoisyModel(BPMDataModel model, double epsilon)
{
    BPMDataModel noisyModel = new BPMDataModel(model.GetInputs().Length, model.GetNoOfFeatures());
    List<double> ranges = model.GetRanges();
    // Scratch buffer reused for each vector; Vector.FromArray copies its contents.
    double[] values = new double[model.GetInputs()[0].Count];
    List<Vector> x = new List<Vector>();
    foreach (Vector vector in model.GetInputs())
    {
        for (int i = 0; i < ranges.Count; i++)
        {
            // Symmetric uniform draw in (-0.5, 0.5] feeding the Laplacian inversion.
            double randomVal = 0.5 - _noiseSource.NextDouble();
            values[i] = CreateLaplacian(epsilon, 0, vector[i], ranges[i], randomVal);
        }
        x.Add(Vector.FromArray(values));
    }
    noisyModel.SetAllVectorFeatures(x.ToArray());
    noisyModel.SetClasses(model.GetClasses());
    //noisyModel.CalculateFeatureLimits();
    //noisyModel.ScaleFeatures();
    return noisyModel;
}
/// <summary>
/// Trains the Bayes Point Machine on <paramref name="model"/>, optionally writes the
/// posterior weight summary to the results file, and returns test-set accuracy.
/// </summary>
/// <param name="model">Training data (feature vectors and class labels).</param>
/// <param name="addBias">Unused here; retained so existing call sites keep compiling.</param>
/// <param name="testSet">Feature vectors to classify.</param>
/// <param name="testResults">Expected class index for each test vector.</param>
/// <returns>Accuracy as a percentage in [0, 100].</returns>
private static double RunBPMGeneral(BPMDataModel model, bool addBias, Vector[] testSet, int[] testResults)
{
    int correctCount = 0;
    VectorGaussian[] posteriorWeights = _bpm.Train(model.GetInputs(), model.GetClasses());
    // NOTE(review): indexing [1] assumes at least two classes — confirm against BPM.Train.
    string actualWeights = posteriorWeights[1].ToString();
    // BUG FIX: the original did Substring(0, IndexOf("\r")) unconditionally; when the
    // string contains no CR (Unix-style newlines) IndexOf yields -1 and Substring throws.
    int breakLocation = actualWeights.IndexOfAny(new[] { '\r', '\n' });
    if (breakLocation >= 0)
    {
        actualWeights = actualWeights.Substring(0, breakLocation);
    }
    if (!_onlyWriteAggregateResults && _writeGaussians)
    {
        _writer.WriteLine("Weights= " + actualWeights);
    }
    Discrete[] predictions = _bpm.Test(testSet);
    int i = 0;
    foreach (Discrete prediction in predictions)
    {
        if (FindMaxValPosition(prediction.GetProbs().ToArray()) == testResults[i])
        {
            correctCount++;
        }
        i++;
    }
    // Empty test set reports 0% rather than NaN from a 0/0 division.
    double accuracy = predictions.Length == 0
        ? 0
        : ((double)correctCount / predictions.Length) * 100;
    //double logEvidence = bpm.GetLogEvidence();
    return accuracy;
}
/// <summary>
/// Begin/Cancel button handler. First click validates inputs, loads the data
/// models and starts the background run; a click during a run requests cancellation.
/// </summary>
private void begin_Click(object sender, EventArgs e)
{
    if (_maxSensitivity < _startSensitivity)
    {
        ShowDialog(@"The maximum sensitivity must be greater than or equal to the minimum sensitivity.", "Error", false);
        return;
    }
    if (!_performingCalcs)
    {
        //Disable input changes while the worker runs
        ChangeStatusOfInputs(false);
        beginButton.Text = @"Cancel";
        _bpm = new BPM(_numOfClasses, _noisePrecision);
        try
        {
            _trainingModel = FileUtils.ReadFile(_trainingFilePath, _labelAtStartOfLine, _noOfFeatures, _addBias);
            _testModel = FileUtils.ReadFile(_testFilePath, _labelAtStartOfLine, _noOfFeatures, _addBias);
            _writer = new StreamWriter(_resultsFilePath, _appendToFile);
            _totalRuns = (int)Math.Ceiling(_noOfRuns * (1 + ((_maxSensitivity - _startSensitivity) / _sensitivityIncrement)));
            _last = DateTime.Now;
            bw.WorkerReportsProgress = true;
            bw.WorkerSupportsCancellation = true;
            // BUG FIX: detach before attaching. Re-subscribing on every click made the
            // worker invoke each handler once per previous run.
            bw.DoWork -= bw_DoWork;
            bw.DoWork += bw_DoWork;
            bw.ProgressChanged -= bw_ProgressChanged;
            bw.ProgressChanged += bw_ProgressChanged;
            bw.RunWorkerCompleted -= bw_RunWorkerCompleted;
            bw.RunWorkerCompleted += bw_RunWorkerCompleted;
            // Finish all UI/state setup before RunWorkerAsync so an early
            // ProgressChanged never races a stale progress-bar Maximum.
            progressBar1.Maximum = _totalRuns;
            _performingCalcs = true;
            _prevRem = _totalRuns;
            bw.RunWorkerAsync();
        }
        catch (Exception exception)
        {
            ShowDialog("Sorry, there was an error reading the input data: " + exception.GetType(), "Error", true);
            beginButton.Text = @"Begin processing";
            ChangeStatusOfInputs(true);
        }
    }
    else
    {
        bw.CancelAsync();
        beginButton.Text = @"Begin processing";
        //Tidy up the status display
        statusLabel.Text = @"";
    }
}
/// <summary>
/// Reads a delimited data file (tab, space or comma separated) into a BPMDataModel.
/// Each line is assumed to carry a class label AND an ID in addition to the
/// feature values. Also records per-feature min/max limits on the model.
/// </summary>
/// <param name="filename">Path of the data file to read.</param>
/// <param name="labelAtStart">True if the class label is the first token on each line.</param>
/// <param name="noOfFeatures">Number of feature columns to parse per line.</param>
/// <param name="addBias">If true, appends a constant 1.0 bias feature to every vector.</param>
/// <returns>A populated model with feature vectors, class labels and input limits.</returns>
public static BPMDataModel ReadFile(string filename, bool labelAtStart, int noOfFeatures, bool addBias)
{
    // First pass: count the rows so the fixed-size arrays can be allocated exactly.
    // FIX: the counting reader is now wrapped in using so it is released even if
    // reading throws (the original only disposed it on the success path).
    int noOfInputs = 0;
    using (StreamReader temp = new StreamReader(filename))
    {
        while (!temp.EndOfStream)
        {
            temp.ReadLine();
            noOfInputs++;
        }
    }

    BPMDataModel newModel = new BPMDataModel(noOfInputs, noOfFeatures);
    List<double[]> featureData = new List<double[]>();
    //Holds the upper and lower limits for each feature being analysed; NaN = not seen yet.
    List<double[]> limits = new List<double[]>(noOfFeatures);
    for (int i = 0; i < noOfFeatures; i++)
    {
        limits.Add(new[] { Double.NaN, Double.NaN });
    }
    //Initialise feature data list (one column array per feature)
    for (int i = 0; i < noOfFeatures; i++)
    {
        featureData.Add(new double[noOfInputs]);
    }

    int[] classData = new int[noOfInputs];
    List<Vector> x = new List<Vector>();
    int currentLocation = 0;
    using (StreamReader reader = new StreamReader(filename))
    {
        while (!reader.EndOfStream)
        {
            string line = reader.ReadLine();
            //Allow for splitting of data in file by either tab, space or comma
            if (line != null)
            {
                string[] pieces = line.Split('\t', ' ', ',');
                //This assumes class identifier AND ID no on each line
                int n = pieces.Length - 2;
                double[] values;
                if (addBias)
                {
                    values = new double[n + 1];
                    values[n] = 1;
                }
                else
                {
                    values = new double[n];
                }
                // Read feature values and track per-feature min/max.
                int labelIndex = labelAtStart ? 0 : n + 1;
                for (int i = 0; i < noOfFeatures; i++)
                {
                    int index = labelAtStart ? i + 1 : i;
                    // Parse once and reuse (the original parsed the same token twice).
                    values[i] = Double.Parse(pieces[index]);
                    featureData[i][currentLocation] = values[i];
                    if (Double.IsNaN(limits[i][0]) || values[i] < limits[i][0])
                    {
                        limits[i][0] = values[i];
                    }
                    if (Double.IsNaN(limits[i][1]) || values[i] > limits[i][1])
                    {
                        limits[i][1] = values[i];
                    }
                }
                // BUG FIX: the original read pieces[labelIndex - 1], which is pieces[-1]
                // (always IndexOutOfRangeException) when labelAtStart is true. Presumably
                // the first token is the label in that layout — TODO confirm against data.
                classData[currentLocation] = Int32.Parse(pieces[labelAtStart ? labelIndex : labelIndex - 1]);
                x.Add(Vector.FromArray(values));
            }
            currentLocation++;
        }
        // The using block disposes the reader; the original also called Dispose() redundantly.
    }

    newModel.SetAllVectorFeatures(x.ToArray());
    newModel.SetClasses(classData);
    newModel.SetInputLimits(limits);
    return newModel;
}
/// <summary>
/// Loads a tab/space/comma-delimited data file into a BPMDataModel, assuming each
/// line carries a class label AND an ID alongside the feature values. Per-feature
/// min/max limits are computed and stored on the returned model.
/// </summary>
/// <param name="filename">Path of the data file to read.</param>
/// <param name="labelAtStart">True if the class label is the first token on each line.</param>
/// <param name="noOfFeatures">Number of feature columns to parse per line.</param>
/// <param name="addBias">If true, a constant 1.0 bias feature is appended to every vector.</param>
/// <returns>A populated model with feature vectors, class labels and input limits.</returns>
public static BPMDataModel ReadFile(string filename, bool labelAtStart, int noOfFeatures, bool addBias)
{
    // Pass 1: count rows so fixed-size arrays can be allocated up front.
    // FIX: wrapped in using — the original leaked the reader if an exception occurred.
    int noOfInputs = 0;
    using (StreamReader temp = new StreamReader(filename))
    {
        while (!temp.EndOfStream)
        {
            temp.ReadLine();
            noOfInputs++;
        }
    }

    BPMDataModel newModel = new BPMDataModel(noOfInputs, noOfFeatures);
    List<double[]> featureData = new List<double[]>();
    //Holds the upper and lower limits for each feature being analysed; NaN = unset.
    List<double[]> limits = new List<double[]>(noOfFeatures);
    for (int i = 0; i < noOfFeatures; i++)
    {
        limits.Add(new[] { Double.NaN, Double.NaN });
    }
    //Initialise feature data list (one column array per feature)
    for (int i = 0; i < noOfFeatures; i++)
    {
        featureData.Add(new double[noOfInputs]);
    }

    int[] classData = new int[noOfInputs];
    List<Vector> x = new List<Vector>();
    int currentLocation = 0;
    // Pass 2: parse each line into features, label and limits.
    using (StreamReader reader = new StreamReader(filename))
    {
        while (!reader.EndOfStream)
        {
            string line = reader.ReadLine();
            //Allow for splitting of data in file by either tab, space or comma
            if (line != null)
            {
                string[] pieces = line.Split('\t', ' ', ',');
                //This assumes class identifier AND ID no on each line
                int n = pieces.Length - 2;
                double[] values;
                if (addBias)
                {
                    values = new double[n + 1];
                    values[n] = 1;
                }
                else
                {
                    values = new double[n];
                }
                int labelIndex = labelAtStart ? 0 : n + 1;
                for (int i = 0; i < noOfFeatures; i++)
                {
                    int index = labelAtStart ? i + 1 : i;
                    // Parse the token once and reuse it (the original parsed it twice).
                    values[i] = Double.Parse(pieces[index]);
                    featureData[i][currentLocation] = values[i];
                    if (Double.IsNaN(limits[i][0]) || values[i] < limits[i][0])
                    {
                        limits[i][0] = values[i];
                    }
                    if (Double.IsNaN(limits[i][1]) || values[i] > limits[i][1])
                    {
                        limits[i][1] = values[i];
                    }
                }
                // BUG FIX: pieces[labelIndex - 1] evaluated to pieces[-1] (always throws)
                // whenever labelAtStart was true. Presumably the first token is the label
                // in that layout — TODO confirm against the data format.
                classData[currentLocation] = Int32.Parse(pieces[labelAtStart ? labelIndex : labelIndex - 1]);
                x.Add(Vector.FromArray(values));
            }
            currentLocation++;
        }
        // Reader is disposed by the using block (the original also called Dispose() redundantly).
    }

    newModel.SetAllVectorFeatures(x.ToArray());
    newModel.SetClasses(classData);
    newModel.SetInputLimits(limits);
    return newModel;
}