/// <summary>
/// Runs a previously trained MLP model against <paramref name="testData"/> and returns
/// the network's prediction output. The model is loaded from "<paramref name="modelName"/>.xml".
/// </summary>
/// <param name="testData">Feature rows to classify; its column count sizes the input layer.</param>
/// <param name="modelName">Model file name without the ".xml" extension.</param>
/// <param name="hiddenLayers">Neuron count of the single hidden layer (kept for signature compatibility).</param>
/// <param name="activationType">Activation function the network was trained with.</param>
/// <returns>A 1x1 result matrix, or null if loading/prediction fails (original contract preserved).</returns>
public Matrix<float> testingMLP(Matrix<float> testData, string modelName, int hiddenLayers = 2, ANN_MLP.AnnMlpActivationFunction activationType = ANN_MLP.AnnMlpActivationFunction.SigmoidSym)
{
    Matrix<float> finalResult = null;
    // Layer sizes: input = feature count, one hidden layer, single output neuron.
    layerSize = new Matrix<int>(new int[] { testData.Cols, hiddenLayers, 1 });
    try
    {
        using (ANN_MLP network1 = new ANN_MLP())
        using (FileStorage storage = new FileStorage(modelName + ".xml", FileStorage.Mode.Read)) // dispose the storage — the original leaked this native handle
        {
            network1.SetActivationFunction(activationType);
            network1.SetLayerSizes(layerSize);
            network1.Read(storage.GetFirstTopLevelNode()); // load trained ANN weights
            Matrix<float> result = new Matrix<float>(1, 1);
            network1.Predict(testData, result); // start network prediction
            finalResult = result;
            return finalResult;
        }
    }
    catch (Exception)
    {
        // Preserve the original contract: any failure yields null rather than throwing.
        return finalResult;
    }
}
/// <summary>
/// Handler for the "load BP network" button: lets the user pick an XML model file
/// and loads its weights into the shared <c>bp</c> network.
/// </summary>
private void btnLoadBP_Click(object sender, EventArgs e)
{
    // OpenFileDialog is IDisposable (CommonDialog) — the original never disposed it.
    using (OpenFileDialog ofd = new OpenFileDialog())
    {
        ofd.Filter = "XML文件(*.xml)|*.xml"; // runtime filter string kept verbatim
        if (ofd.ShowDialog() == System.Windows.Forms.DialogResult.OK)
        {
            // Dispose the FileStorage to release the native handle (original leaked it).
            using (FileStorage fs = new FileStorage(ofd.FileName, FileStorage.Mode.Read))
            {
                bp.Read(fs.GetRoot());
            }
            MessageBox.Show("导入完成");
        }
    }
}
/// <summary>
/// Retrains the shared <c>ann</c> MLP on <c>allFeatureOfSample</c>/<c>annAllResponse</c>,
/// persists it back to ANN_Model.xml, runs a sanity prediction on training row 12,
/// shows/speaks the predicted label, and logs the raw prediction to ANNResult.txt.
/// </summary>
private void annTraining()
{
    string finalOutput = "";
    int features = 16;
    int classes = 26;

    // Layer topology: 16 inputs -> 416 -> 208 -> 104 -> 52 -> 26 outputs.
    Matrix<int> layers = new Matrix<int>(6, 1);
    layers[0, 0] = features;
    layers[1, 0] = classes * 16;
    layers[2, 0] = classes * 8;
    layers[3, 0] = classes * 4;
    layers[4, 0] = classes * 2;
    layers[5, 0] = classes;

    // Warm-start from the previously saved model. The original leaked this FileStorage.
    using (FileStorage fileStorageRead = new FileStorage(@"ANN_Model.xml", FileStorage.Mode.Read))
    {
        ann.Read(fileStorageRead.GetRoot(0));
    }
    ann.SetLayerSizes(layers);
    ann.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
    ann.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0, 0);
    ann.Train(allFeatureOfSample, DataLayoutType.RowSample, annAllResponse);

    // Persist the retrained weights; disposing the storage releases/flushes it
    // (the original never released the Write-mode storage).
    using (FileStorage fileStorageWrite = new FileStorage(@"ANN_Model.xml", FileStorage.Mode.Write))
    {
        ann.Write(fileStorageWrite);
    }

    // Sanity-check prediction on row 12 of the training features.
    Matrix<float> testSample = new Matrix<float>(1, 16);
    for (int q = 0; q < 16; q++)
    {
        testSample[0, q] = allFeatureOfSample[12, q];
    }
    float real = ann.Predict(testSample);
    finalOutput += labelArray[(int)real];
    label5.Text = finalOutput;

    if (label5.Text != " ")
    {
        // The original created a synthesizer, disposed it, and immediately created a
        // second one; a single instance is equivalent. Not disposed here on purpose —
        // disposing right after SpeakAsync would cut the speech off.
        SpeechSynthesizer reader1 = new SpeechSynthesizer();
        reader1.SpeakAsync(finalOutput);
    }
    else
    {
        MessageBox.Show("No Text Present!");
    }
    System.IO.File.WriteAllText(@"ANNResult.txt", real.ToString());
}
/// <summary>
/// Attempts to load an ANN_MLP model from <paramref name="filename"/> and clears <c>values</c>.
/// </summary>
/// <param name="filename">Path of the OpenCV XML/YAML model file.</param>
/// <returns>false if the file could not be opened; true otherwise.</returns>
bool Read(string filename)
{
    // Dispose releases the storage on every path — the original never released it
    // on the early-return path and leaked the ANN_MLP instance entirely.
    using (FileStorage fs = new FileStorage(filename, FileStorage.Mode.Read))
    {
        if (!fs.IsOpened)
        {
            return false;
        }
        using (ANN_MLP network = new ANN_MLP())
        {
            // NOTE(review): the network is read and then discarded, exactly as in the
            // original — presumably a validation-only load. Confirm this is intended.
            network.Read(fs.GetRoot());
        }
        values.Clear();
        // TODO(review): the C++-style loop repopulating `values` from fs["values"]
        // was left commented out during porting, so `values` stays empty after a
        // successful read — confirm against the original implementation.
        return true;
    }
}
/// <summary>
/// Trains a backprop MLP on <paramref name="inputData"/>/<paramref name="outputData"/>
/// and saves the trained model to "<paramref name="modelName"/>.xml".
/// </summary>
/// <param name="inputData">Training samples, one per row; column count sizes the input layer.</param>
/// <param name="outputData">Expected responses, one row per sample.</param>
/// <param name="modelName">Output model file name without the ".xml" extension.</param>
/// <param name="iteration">Maximum training iterations (term criteria).</param>
/// <param name="learningRate">Epsilon for the term criteria.</param>
/// <param name="hiddenLayers">Neuron count of the single hidden layer.</param>
/// <param name="activationType">Activation function for all neurons.</param>
/// <param name="backpropWeightScale">Backprop weight-gradient scale.</param>
/// <param name="backpropMomentumScale">Backprop momentum scale.</param>
/// <returns>true on success; false if any step throws (original contract preserved).</returns>
public bool trainingMLP(Matrix<float> inputData, Matrix<float> outputData, string modelName, int iteration = 1000, double learningRate = 0.01, int hiddenLayers = 2, ANN_MLP.AnnMlpActivationFunction activationType = ANN_MLP.AnnMlpActivationFunction.SigmoidSym, double backpropWeightScale = 0.1, double backpropMomentumScale = 0.2)
{
    try
    {
        // Neurons per layer: input layer first, output layer (1 neuron) last.
        layerSize = new Matrix<int>(new int[] { inputData.Cols, hiddenLayers, 1 });

        using (ANN_MLP network = new ANN_MLP())
        {
            network.SetActivationFunction(activationType);
            network.SetLayerSizes(layerSize);
            network.TermCriteria = new MCvTermCriteria(iteration, learningRate); // iteration/epsilon cap for training
            network.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop);
            network.BackpropWeightScale = backpropWeightScale;
            network.BackpropMomentumScale = backpropMomentumScale;

            //network.Save("tmp.xml"); // Save temp weights to file for correction before training
            ActivationFunctionHardFix(network); // fix min/max values — presumably (re)writes tmp.xml; TODO confirm
            // Reload the corrected weights. The original leaked this FileStorage.
            using (FileStorage fixedStorage = new FileStorage("tmp.xml", FileStorage.Mode.Read))
            {
                network.Read(fixedStorage.GetFirstTopLevelNode());
            }

            TrainData training = new TrainData(inputData, Emgu.CV.ML.MlEnum.DataLayoutType.RowSample, outputData);
            network.Train(training); // start training
            network.Save(modelName + ".xml");
        }
        return true;
    }
    catch (Exception)
    {
        // Preserve original contract: failures are reported as false, not rethrown.
        return false;
    }
}
// Extracts HOG features from 16 frames centred between `first` and `last`, reduces them
// to 16 dimensions with PCA, retrains an ANN on the result, predicts one row, and
// speaks/stores the predicted label. Runs as async void because it is UI-driven;
// exceptions thrown here are unobservable by callers.
private async void moduleFeatureExtraction(int first, int last)
{
    string fghfh = "";                              // accumulates the predicted label text
    double[,] RawData = new double[16, 3780];       // 16 frames x 3780 HOG descriptor values
    int mid = (first + last) / 2;
    int low = mid - 8;;                             // NOTE(review): stray empty statement (double semicolon)
    int high = mid + 8;
    // Build a one-hot response matrix: column `adasas` is 1 for every row, others 0.
    for (int i = 0; i < 16; i++)
    {
        for (int j = 0; j < 26; j++)
        {
            if (j == adasas) { response[i, j] = 1; }
            if (j != adasas) { response[i, j] = 0; }
        }
    }
    adasas++;                                       // advance to the next class label for the next call
    // Clamp the 16-frame window into [first, last].
    if (low < first) { low++; }
    if (high > last) { low++; }                     // NOTE(review): suspected copy-paste bug — this looks like it should be `high--`; confirm intent
    int length = high - low;                        // NOTE(review): computed but never used below
    for (int k = (low); k < (high); k++)
    {
        string frameName = "gesture//" + k + ".jpeg";
        Image<Bgr, byte> featurExtractionInput = new Image<Bgr, byte>(frameName);
        //pictureBox3.Image = featurExtractionInput.Bitmap;
        //label4.Text = k.ToString();
        await Task.Delay(1000 / Convert.ToInt32(2));    // ~500 ms pause between frames (UI pacing)
        float[] desc = new float[3780];
        desc = GetVector(featurExtractionInput);        // HOG descriptor for this frame
        int i = k - (low);                              // row index within the 16-frame window
        for (int j = 0; j < 3780; j++)
        {
            double val = Convert.ToDouble(desc[j]);
            RawData.SetValue(val, i, j);
        }
        // After the last frame: run PCA, retrain, predict, and report.
        if (k == (high - 1))
        {
            Matrix<Double> DataMatrix = new Matrix<Double>(RawData);
            Matrix<Double> Mean = new Matrix<Double>(1, 3780);
            Matrix<Double> EigenValues = new Matrix<Double>(1, 3780);     // NOTE(review): allocated but never passed to PCACompute
            Matrix<Double> EigenVectors = new Matrix<Double>(3780, 3780);
            CvInvoke.PCACompute(DataMatrix, Mean, EigenVectors, 16);      // keep 16 principal components
            Matrix<Double> result = new Matrix<Double>(16, 16);
            CvInvoke.PCAProject(DataMatrix, Mean, EigenVectors, result);  // project 16x3780 -> 16x16
            // Round-trip the projected matrix through XML (also persisted to test.xml).
            String filePath = @"test.xml";
            StringBuilder sb = new StringBuilder();
            (new XmlSerializer(typeof(Matrix<double>))).Serialize(new StringWriter(sb), result);
            XmlDocument xDoc = new XmlDocument();
            xDoc.LoadXml(sb.ToString());
            System.IO.File.WriteAllText(filePath, sb.ToString());
            Matrix<double> matrix = (Matrix<double>)(new XmlSerializer(typeof(Matrix<double>))).Deserialize(new XmlNodeReader(xDoc));
            // Append a human-readable dump of the matrix to the text previously logged in g.txt.
            string djf = null;
            djf = System.IO.File.ReadAllText(@"g.txt");
            djf += Environment.NewLine;
            djf += Environment.NewLine;
            for (int p = 0; p < 16; p++)
            {
                for (int q = 0; q < 16; q++)
                {
                    djf += p + " , " + q + " " + matrix[p, q].ToString() + " ";
                }
                djf += Environment.NewLine;
            }
            // NOTE(review): `djf` is built but never written anywhere — confirm whether a write was dropped.
            Matrix<float> masjhdb = result.Convert<float>();
            TrainData trainData = new TrainData(masjhdb, DataLayoutType.RowSample, response);   // NOTE(review): created but unused; Train below takes the raw matrices
            // Layer topology: 16 inputs -> 416 -> 208 -> 104 -> 52 -> 26 outputs.
            int features = 16;
            int classes = 26;
            Matrix<int> layers = new Matrix<int>(6, 1);
            layers[0, 0] = features;
            layers[1, 0] = classes * 16;
            layers[2, 0] = classes * 8;
            layers[3, 0] = classes * 4;
            layers[4, 0] = classes * 2;
            layers[5, 0] = classes;
            ANN_MLP ann = new ANN_MLP();
            // Warm-start from abc.xml, retrain on this window, and write the model back.
            // NOTE(review): neither FileStorage is released/disposed here.
            FileStorage fileStorageRead = new FileStorage(@"abc.xml", FileStorage.Mode.Read);
            ann.Read(fileStorageRead.GetRoot(0));
            ann.SetLayerSizes(layers);
            ann.SetActivationFunction(ANN_MLP.AnnMlpActivationFunction.SigmoidSym, 0, 0);
            ann.SetTrainMethod(ANN_MLP.AnnMlpTrainMethod.Backprop, 0, 0);
            ann.Train(masjhdb, DataLayoutType.RowSample, response);
            FileStorage fileStorageWrite = new FileStorage(@"abc.xml", FileStorage.Mode.Write);
            ann.Write(fileStorageWrite);
            // Sanity prediction on row 11 of the projected features.
            Matrix<float> hehe = new Matrix<float>(1, 16);
            for (int q = 0; q < 16; q++)
            {
                hehe[0, q] = masjhdb[11, q];
            }
            float real = ann.Predict(hehe);
            fghfh += array[(int)real];           // map predicted class index to its label character
            SpeechSynthesizer reader = new SpeechSynthesizer();
            if (richTextBox1.Text != " ")
            {
                // NOTE(review): disposing and immediately recreating the synthesizer is redundant.
                reader.Dispose();
                reader = new SpeechSynthesizer();
                reader.SpeakAsync(fghfh.ToString());
            }
            else
            {
                MessageBox.Show("No Text Present!");
            }
            richTextBox1.Text = fghfh.ToString();
            System.IO.File.WriteAllText(@"g.txt", real.ToString());   // overwrites the log that `djf` was read from above
        }
    }
}