/// <summary>
/// Saves the mixture weights in Sphinx-3 binary format.
/// Writes the S3 header, the three dimension counts, the total value count,
/// and then one linear-domain weight row per (state, stream).
/// </summary>
/// <param name="gaussianWeights">log-domain mixture weights to save</param>
/// <param name="text">output file name (resolved against this.location)</param>
/// <param name="append">whether to append to an existing stream</param>
private void saveMixtureWeightsBinary(GaussianWeights gaussianWeights, string text, bool append) {
    this.logger.info("Saving mixture weights to: ");
    this.logger.info(text);

    Properties properties = new Properties();
    properties.setProperty("version", "1.0");
    if (this.doCheckSum) {
        properties.setProperty("chksum0", this.checksum);
    }

    DataOutputStream dataOutputStream = this.writeS3BinaryHeader(this.location, text, properties, append);

    int statesNum = gaussianWeights.getStatesNum();
    int streamsNum = gaussianWeights.getStreamsNum();
    int gauPerState = gaussianWeights.getGauPerState();
    this.writeInt(dataOutputStream, statesNum);
    this.writeInt(dataOutputStream, streamsNum);
    this.writeInt(dataOutputStream, gauPerState);

    // Only single-stream models are supported by this saver.
    if (!Sphinx3Saver.assertionsDisabled && streamsNum != 1) {
        throw new AssertionError();
    }

    // Total number of float values that follow.
    this.writeInt(dataOutputStream, gauPerState * statesNum * streamsNum);

    for (int i = 0; i < statesNum; i++) {
        for (int j = 0; j < streamsNum; j++) {
            float[] linearWeights = new float[gauPerState];
            float[] logWeights = new float[gauPerState];
            for (int k = 0; k < gauPerState; k++) {
                logWeights[k] = gaussianWeights.get(i, j, k);
            }
            // Weights are held in log domain in memory but stored linear on disk.
            this.logMath.logToLinear(logWeights, linearWeights);
            this.writeFloatArray(dataOutputStream, linearWeights);
        }
    }

    // Simplified decompiler residue: the original computed `num = 0` and then
    // branched on `num != 0` twice, so with assertions enabled a checksum
    // request unconditionally failed here. Checksum writing is unsupported.
    if (this.doCheckSum && !Sphinx3Saver.assertionsDisabled) {
        this.doCheckSum = false;
        throw new AssertionError("Checksum not supported");
    }
    dataOutputStream.close();
}
/// <summary>
/// Saves the mixture weights in Sphinx-3 ASCII format ("mixw" section):
/// a header line with the three dimension counts, then per (state, stream)
/// a "mixw [i j]" line with the weight sum followed by the linear weights.
/// </summary>
/// <param name="gaussianWeights">log-domain mixture weights to save</param>
/// <param name="text">output file name (resolved against this.location)</param>
/// <param name="append">whether to append to an existing stream</param>
private void saveMixtureWeightsAscii(GaussianWeights gaussianWeights, string text, bool append) {
    this.logger.info("Saving mixture weights to: ");
    this.logger.info(text);

    OutputStream outputStream = StreamFactory.getOutputStream(this.location, text, append);
    if (outputStream == null) {
        // Decompiled StringBuilder chain replaced with plain concatenation.
        throw new IOException("Error trying to write file " + this.location + text);
    }

    // autoFlush = true: every println flushes to the underlying stream.
    PrintWriter printWriter = new PrintWriter(outputStream, true);

    int statesNum = gaussianWeights.getStatesNum();
    int streamsNum = gaussianWeights.getStreamsNum();
    int gauPerState = gaussianWeights.getGauPerState();

    // Header: "mixw <states> <streams> <densities>"
    printWriter.print("mixw ");
    printWriter.print(statesNum + " ");
    printWriter.print(streamsNum + " ");
    printWriter.println(gauPerState);

    for (int i = 0; i < statesNum; i++) {
        for (int j = 0; j < streamsNum; j++) {
            printWriter.print("mixw [" + i + " " + j + "] ");

            float[] linearWeights = new float[gauPerState];
            float[] logWeights = new float[gauPerState];
            for (int k = 0; k < gauPerState; k++) {
                logWeights[k] = gaussianWeights.get(i, j, k);
            }
            // Held log-domain in memory, written linear.
            this.logMath.logToLinear(logWeights, linearWeights);

            // The value after the "[i j]" header is the sum of the linear weights.
            float sum = 0f;
            for (int l = 0; l < gauPerState; l++) {
                sum += linearWeights[l];
            }
            printWriter.println(sum);
            printWriter.print("\n\t");
            for (int l = 0; l < gauPerState; l++) {
                printWriter.print(" " + linearWeights[l]);
            }
            printWriter.println();
        }
    }
    outputStream.close();
}
/// <summary>
/// Creates a freshly initialized set of mixture weights.
/// Each state's row is floored, normalized, converted to log domain, and
/// stored in stream 0.
/// </summary>
/// <param name="num">number of states</param>
/// <param name="num2">number of streams; must be 1 when assertions are on</param>
/// <param name="num3">number of Gaussians per state</param>
/// <param name="num4">floor value applied to every weight before normalizing</param>
private GaussianWeights initMixtureWeights(int num, int num2, int num3, float num4) {
    // Only single-stream models are supported here.
    if (!ModelInitializerLoader.assertionsDisabled && num2 != 1) {
        throw new AssertionError();
    }
    GaussianWeights weights = new GaussianWeights("mixtureweights", num, num3, num2);
    for (int state = 0; state < num; state++) {
        float[] row = new float[num3];
        // Presumably floorData raises each (zero-initialized) entry to num4,
        // giving a uniform row once normalized -- TODO confirm against helper.
        this.floorData(row, num4);
        this.normalize(row);
        this.logMath.linearToLog(row);
        weights.put(state, 0, row);
    }
    return weights;
}
/// <summary>
/// Builds a pool containing one accumulation Buffer per (stream, state)
/// pair, each sized to the number of Gaussians per state.
/// </summary>
/// <param name="gaussianWeights">weights whose dimensions size the pool</param>
/// <returns>the populated pool, keyed by flat stream-major index</returns>
private Pool createWeightsPoolBuffer(GaussianWeights gaussianWeights) {
    Pool pool = new Pool(gaussianWeights.getName());
    int statesNum = gaussianWeights.getStatesNum();
    int streamsNum = gaussianWeights.getStreamsNum();
    int gauPerState = gaussianWeights.getGauPerState();
    for (int stream = 0; stream < streamsNum; stream++) {
        for (int state = 0; state < statesNum; state++) {
            // Flat index: all buffers of one stream are contiguous.
            int id = stream * statesNum + state;
            pool.put(id, new Buffer(gauPerState, true, id));
        }
    }
    return pool;
}
/// <summary>
/// Creates a Gaussian mixture whose components come from a shared
/// MixtureComponentSet rather than a per-mixture component array; the base
/// constructor is passed null for that array.
/// NOTE(review): base-class handling of the null component array is not
/// visible here -- confirm it tolerates null.
/// </summary>
public SetBasedGaussianMixture(GaussianWeights mixtureWeights, MixtureComponentSet mixtureComponentSet, int id) : base(mixtureWeights, null, id) { this.mixtureComponentSet = mixtureComponentSet; }
/// <summary>
/// Runs 10-fold cross-validation of a neural-network classifier over EEG
/// power-band features and prints the metrics as JSON.
///
/// Usage:
///   (no args)            all 19 electrodes, all 6 power bands
///   "e0,e1,..."          chosen electrodes, all 6 power bands
///   "e0,..." "b0,..."    chosen electrodes and chosen power bands
///
/// Power bands: 0=delta(2-4) 1=theta(5-7) 2=alpha(8-15)
///              3=beta(16-31) 4=gamma(32-49) 5=gamma(50-120)
/// </summary>
static void main(String[] args) {
    int[] electrode_list;
    int[] power_bands;
    if (args == null || args.Length == 0) {
        electrode_list = Enumerable.Range(0, 19).ToArray();
        power_bands = new int[] { 0, 1, 2, 3, 4, 5 };
    } else if (args.Length == 1) {
        electrode_list = args[0].Split(',').Select(e => int.Parse(e)).ToArray();
        power_bands = new int[] { 0, 1, 2, 3, 4, 5 };
    } else {
        electrode_list = args[0].Split(',').Select(e => int.Parse(e)).ToArray();
        power_bands = args[1].Split(',').Select(e => int.Parse(e)).ToArray();
    }

    load_inputs(electrode_list, power_bands);
    shuffle_inputs(new Random());

    // Tags reshaped into 1-element double vectors, the layout RunEpoch expects.
    double[][] outputs = new double[classifier_tags.Length][];
    for (int i = 0; i < classifier_tags.Length; i++) {
        outputs[i] = new double[] { (double)classifier_tags[i] };
    }

    // (Earlier NaiveBayes / LDA / whole-training-set experiments removed;
    // see version-control history if they are needed again.)

    // --- 10-fold cross-validation ---
    var cross_validation_accuracies = new List<double>();
    List<double[]> arr = classifier_inputs.ToList();
    List<double[]> outarr = outputs.ToList();
    int folds = 10;
    // NOTE: integer division -- any remainder samples are trained on but
    // never used for validation.
    int fold_size = classifier_inputs.Length / folds;
    int cross_correct_total = 0;
    int cross_wrong_total = 0;
    int cross_total_total = 0;

    for (int i = 0; i < folds; i++) {
        // Fold i is the held-out slice; everything else is training data.
        var training_set = new List<double[]>();
        var training_tags = new List<double[]>();
        var validation_set = new List<double[]>();
        var validation_tags = new List<double[]>();
        training_set.AddRange(arr.Take(i * fold_size));
        training_tags.AddRange(outarr.Take(i * fold_size));
        validation_set.AddRange(arr.Skip(i * fold_size).Take(fold_size));
        validation_tags.AddRange(outarr.Skip(i * fold_size).Take(fold_size));
        training_set.AddRange(arr.Skip(i * fold_size + fold_size));
        training_tags.AddRange(outarr.Skip(i * fold_size + fold_size));

        var training_array = training_set.ToArray();
        var training_array_tags = training_tags.ToArray();
        var validation_array = validation_set.ToArray();
        var validation_array_tags = validation_tags.ToArray();

        // Fresh randomly-initialized network for each fold.
        var cross_network = new ActivationNetwork(new BipolarSigmoidFunction(),
            inputsCount: training_array[0].Length, neuronsCount: new[] { 30, 1 });
        var cross_teacher = new Accord.Neuro.Learning.ParallelResilientBackpropagationLearning(cross_network);
        var cross_initializer = new GaussianWeights(cross_network);
        cross_initializer.Randomize();

        // Train for a fixed 25 epochs (the per-epoch error was unused).
        for (int t = 0; t < 25; t++) {
            cross_teacher.RunEpoch(training_array, training_array_tags);
        }

        // Score on the held-out fold: round the single network output to a class.
        int cross_correct = 0;
        int cross_wrong = 0;
        int cross_total = validation_array_tags.Length;
        for (int t = 0; t < validation_array_tags.Length; t++) {
            var ans = (int)Math.Round(cross_network.Compute(validation_array[t])[0]);
            if (ans == validation_array_tags[t][0]) {
                cross_correct++;
            } else {
                cross_wrong++;
            }
        }
        cross_correct_total += cross_correct;
        cross_wrong_total += cross_wrong;
        cross_total_total += cross_total;
        cross_validation_accuracies.Add((double)cross_correct / (double)cross_total);
    }

    double acc = cross_validation_accuracies.Average();

    // BUG FIX: List<T>.ToString() prints the type name, not the contents;
    // join the fold accuracies explicitly.
    Console.WriteLine(string.Join(", ", cross_validation_accuracies));

    Dictionary<string, object> collection = new Dictionary<string, object>() {
        { "Accuracy", acc },
        { "Fold_Accuracies", cross_validation_accuracies.ToArray() }
    };
    JObject Result = new JObject(new JProperty("metrics", JObject.FromObject(collection)));
    Console.WriteLine("BEGIN METRICS");
    Console.WriteLine(Result.ToString());
    Console.WriteLine("END METRICS");
    Console.ReadKey();
}