public static NeuralBrain LoadANN(string name)
{
    FileInfo file = new FileInfo(Path.ChangeExtension(name, EXT));
    BasicNetwork network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(file);
    NeuralBrain brain = new NeuralBrain(network);
    brain.Name = name;
    return brain;
}
public static void LoadNetworkFromFS(ref NetworkContainer container, string fileName)
{
    try
    {
        container.network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(new FileInfo(fileName));
    }
    catch
    {
        throw new EncogException("Failed to load network from file system.");
    }
}
/// <summary>
/// Perform a deep copy.
/// Silverlight version.
/// </summary>
/// <param name="oldObj">The old object.</param>
/// <returns>The new object.</returns>
public static Object DeepCopy(Object oldObj)
{
    // now make the copy
    MemoryStream mstream = new MemoryStream();
    EncogDirectoryPersistence.SaveObject(mstream, oldObj);

    // now read it back
    mstream.Position = 0;
    Object result = EncogDirectoryPersistence.LoadObject(mstream);
    mstream.Close();
    return result;
}
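A minimal usage sketch, not part of the original source: it assumes the DeepCopy helper above is reachable through a utility class (called ObjectCloner here purely for illustration) and clones a small feedforward network entirely in memory.

// Hypothetical caller for the DeepCopy helper above; the "ObjectCloner" class name is assumed.
BasicNetwork original = EncogUtility.SimpleFeedForward(2, 4, 0, 1, false);
var copy = (BasicNetwork)ObjectCloner.DeepCopy(original);
// The clone is independent of the original: resetting its weights
// leaves the original network unchanged.
copy.Reset();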
public int loadNetwork(string name)
{
    try
    {
        System.IO.FileInfo info = new System.IO.FileInfo(name);
        bestMethod = (IMLRegression)EncogDirectoryPersistence.LoadObject(info);
        return 1;
    }
    catch
    {
        return 0;
    }
}
private void BtnComprobar_Click(object sender, EventArgs e)
{
    try
    {
        if (i0 != null)
        {
            this.conect.Open();

            // Look up the stored network file and expected image size for this person.
            OleDbCommand comando = new OleDbCommand(
                string.Format("select ArchivoNeuronal,Tamano from tbl_firmas where Pnombre = '{0}' and PApellido = '{1}' ",
                              txtNombre.Text, txtApellido.Text),
                this.conect);
            OleDbDataReader lector = comando.ExecuteReader();
            string dirArchivo = null;

            if (lector.HasRows)
            {
                while (lector.Read())
                {
                    dirArchivo = Application.StartupPath + "\\" + lector.GetString(0);
                    tam = lector.GetString(1);
                }

                // Load the stored network and score the presented signature against it.
                BasicNetwork RedExtraida = (BasicNetwork)EncogDirectoryPersistence.LoadObject(new FileInfo(dirArchivo));
                IMLDataSet par = new BasicMLDataSet(E, IDEAL);
                double Respuesta = RedExtraida.CalculateError(par);

                if (Respuesta <= .20)
                {
                    MessageBox.Show("La firma Introducida Tiene un nivel de aceptacion del : " +
                                    Convert.ToString(100 - Math.Round(Respuesta, 3) * 100) +
                                    "%, la firma ha sido ACEPTADA");
                }
                else
                {
                    MessageBox.Show("La firma Introducida Tiene un nivel de aceptacion del : " +
                                    Convert.ToString(100 - Math.Round(Respuesta, 3) * 100) +
                                    "%, la firma ha sido RECHAZADA");
                }
            }
            else
            {
                MessageBox.Show("Nombre y apellido no existe");
            }
        }
        else
        {
            MessageBox.Show("No hay imagen cargada");
        }
    }
    catch (System.ArgumentException)
    {
        MessageBox.Show("Verifique que la imagen tenga un tamaño de " + tam);
    }
    this.conect.Close();
}
public BasicNetwork LoadNetwork()
{
    FileInfo file = _config.TrainedNetworkFile;
    if (!file.Exists)
    {
        Console.WriteLine(@"Can't read file: " + file);
        return null;
    }

    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(file);
    return network;
}
/**
 * Generate from a machine learning method.
 *
 * @param mainClass The main class.
 * @param method The filename of the method.
 * @return The newly created node.
 */
private EncogProgramNode GenerateForMethod(EncogProgramNode mainClass, FileInfo method)
{
    if (EmbedData)
    {
        var encodable = (IMLEncodable)EncogDirectoryPersistence.LoadObject(method);
        var weights = new double[encodable.EncodedArrayLength()];
        encodable.EncodeToArray(weights);
        mainClass.CreateArray("WEIGHTS", weights);
    }

    return mainClass.CreateNetworkFunction("createNetwork", method);
}
public static double Evaluate(FileInfo dataDir, string filename)
{
    FileInfo file = FileUtil.CombinePath(dataDir, Config.NETWORK_FILE);
    if (!file.Exists)
    {
        Console.WriteLine(@"Can't read file: " + file);
        return 0;
    }

    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(file);
    MarketMLDataSet data = GrabData(filename);

    int count = 0;
    int correct = 0;
    foreach (IMLDataPair pair in data)
    {
        IMLData input = pair.Input;
        IMLData actualData = pair.Ideal;
        IMLData predictData = network.Compute(input);

        double actual = actualData[0];
        double predict = predictData[0];
        double diff = Math.Abs(predict - actual);

        Direction actualDirection = DetermineDirection(actual);
        Direction predictDirection = DetermineDirection(predict);

        if (actualDirection == predictDirection)
        {
            correct++;
        }
        count++;

        Console.WriteLine(@"Day " + count + @":actual=" + Format.FormatDouble(actual, 4)
                          + @"(" + actualDirection + @")"
                          + @",predict=" + Format.FormatDouble(predict, 4)
                          + @"(" + predictDirection + @")"
                          + @",diff=" + diff);
    }

    double percent = correct / (double)count;
    Console.WriteLine(@"Direction correct:" + correct + @"/" + count);
    Console.WriteLine(@"Directional Accuracy:" + Format.FormatPercent(percent));
    return percent;
}
static void Main(string[] args)
{
    IMLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    BasicNetwork network = EncogUtility.SimpleFeedForward(2, 6, 0, 1, false);

    EncogUtility.TrainToError(network, trainingSet, 0.01);
    double error = network.CalculateError(trainingSet);
    Console.WriteLine($"Error before save to EG: {error}");

    EncogDirectoryPersistence.SaveObject(new FileInfo(FILENAME), network);
    network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(new FileInfo(FILENAME));

    error = network.CalculateError(trainingSet);
    Console.WriteLine($"Error after load from EG: {error}");
}
/// <summary>
/// Loads a support vector machine from the specified directory and file.
/// Call it as LoadNetwork(directory, file, net);
/// </summary>
/// <param name="directory">The directory.</param>
/// <param name="file">The file.</param>
/// <param name="net">The network name (unused).</param>
/// <returns>The loaded machine, or null if the file does not exist.</returns>
public static SupportVectorMachine LoadNetwork(string directory, string file, string net)
{
    // network file
    FileInfo networkFile = FileUtil.CombinePath(new FileInfo(@directory), @file);
    if (!networkFile.Exists)
    {
        Console.WriteLine(@"Can't read file: " + networkFile);
        return null;
    }

    var network = (SupportVectorMachine)EncogDirectoryPersistence.LoadObject(networkFile);
    return network;
}
/// <summary>
/// Loads a basic network from the specified file.
/// Call it as LoadNetwork(file);
/// </summary>
/// <param name="file">The file.</param>
/// <returns>The loaded network, or null if the file does not exist.</returns>
public static BasicNetwork LoadNetwork(string file)
{
    // network file
    FileInfo networkFile = new FileInfo(@file);
    if (!networkFile.Exists)
    {
        Console.WriteLine(@"Can't read file: " + networkFile);
        return null;
    }

    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(networkFile);
    return network;
}
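A short, hypothetical caller for the helper above (not from the original source); the file name and the two-element input vector are placeholders chosen only for illustration.

// Hypothetical usage: load a previously saved network and compute one output.
BasicNetwork network = LoadNetwork("trainedNetwork.eg"); // placeholder file name
if (network != null)
{
    IMLData input = new BasicMLData(new double[] { 0.5, 0.25 });
    IMLData output = network.Compute(input);
    Console.WriteLine(output[0]);
}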
static void Main(string[] args)
{
    string ruta_red = "C:\\Users\\soyal\\Downloads\\DatosRNA\\TrainRGBC4to6.csv";
    BasicNetwork network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(new FileInfo(ruta_red));

    double[] Entrada = new double[4] { 520, 1340, 1823, 3742 };
    IMLData EntradaN = new BasicMLData(Entrada);
    IMLData Resultado = network.Compute(EntradaN);

    // Pick the output neuron with the highest activation.
    double max = 0;
    int index = 0;
    for (int j = 0; j < 6; j++)
    {
        if (Resultado[j] > max)
        {
            max = Resultado[j];
            index = j;
        }
    }

    switch (index)
    {
        case 0: Console.WriteLine("Rojo"); break;
        case 1: Console.WriteLine("Naranja"); break;
        case 2: Console.WriteLine("Amarillo"); break;
        case 3: Console.WriteLine("Verde"); break;
        case 4: Console.WriteLine("Azul"); break;
        case 5: Console.WriteLine("Cafe"); break;
    }
    Console.ReadKey();
}
static private BasicNetwork LoadNetwork(FileInfo networkFile, IMLDataSet trainingSet)
{
    if (networkFile.Exists)
    {
        Console.WriteLine($"Loading network {networkFile.FullName}");
        return (BasicNetwork)EncogDirectoryPersistence.LoadObject(networkFile);
    }
    else
    {
        Console.WriteLine("Creating NN.");
        var network = EncogUtility.SimpleFeedForward(input: trainingSet.InputSize,
                                                     hidden1: 500,
                                                     hidden2: 50,
                                                     output: 3,
                                                     tanh: true);
        network.Reset();
        return network;
    }
}
private static BasicNetwork LoadNetwork(FileInfo networkFile)
{
    if (networkFile.Exists)
    {
        Console.WriteLine($"Loading network {networkFile.FullName}");
        return (BasicNetwork)EncogDirectoryPersistence.LoadObject(networkFile);
    }
    else
    {
        Console.WriteLine(@"File not found: " + networkFile.FullName);
        Console.ReadKey();
        Environment.Exit(0);
    }
    return null;
}
static void Main(string[] args)
{
    string ruta_red = "C:\\Users\\soyal\\OneDrive - UNIVERSIDAD NACIONAL AUTÓNOMA DE MÉXICO\\Documentos\\2020-2\\InteligenciaArtificial\\Encog\\Train.txt";
    double[] Entrada = new double[2] { 10, 10 };

    BasicNetwork network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(new FileInfo(ruta_red));
    IMLData EntradaN = new BasicMLData(Entrada);
    IMLData output = network.Compute(EntradaN);

    double prueba = output[0];
    Console.WriteLine(prueba);
    Console.ReadKey();
}
private void TrainNetwork()
{
    network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkFile);

    trainingSet = EncogUtility.LoadCSV2Memory(Config.NormalizedTrainingFile.ToString(),
                                              network.InputCount, network.OutputCount,
                                              true, CSVFormat.English, false);
    crossValidationSet = EncogUtility.LoadCSV2Memory(Config.NormalizedCrossValidationFile.ToString(),
                                                     network.InputCount, network.OutputCount,
                                                     true, CSVFormat.English, false);

    train = new ResilientPropagation(network, trainingSet);

    IterationDataCollection.Clear();
    CVIterationDataCollection.Clear();
    IterationLogs.Clear();

    trainWorker.RunWorkerAsync();
}
private static void TrainNetwork()
{
    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkFile);
    var trainingSet = EncogUtility.LoadCSV2Memory(Config.NormalizedTrainingFile.ToString(),
                                                  network.InputCount, network.OutputCount,
                                                  true, CSVFormat.English, false);
    var train = new ResilientPropagation(network, trainingSet);

    var epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine("Epoch : {0} Error : {1}", epoch, train.Error);
        epoch++;
    } while (train.Error > 0.01);

    EncogDirectoryPersistence.SaveObject(Config.TrainedNetworkFile, network);
}
/// <inheritdoc />
public override sealed bool ExecuteCommand(String args)
{
    // get filenames
    String evalID = Prop.GetPropertyString(ScriptProperties.MlConfigEvalFile);
    String resourceID = Prop.GetPropertyString(ScriptProperties.MlConfigMachineLearningFile);
    String outputID = Prop.GetPropertyString(ScriptProperties.MlConfigOutputFile);

    EncogLogging.Log(EncogLogging.LevelDebug, "Beginning evaluate raw");
    EncogLogging.Log(EncogLogging.LevelDebug, "evaluate file:" + evalID);
    EncogLogging.Log(EncogLogging.LevelDebug, "resource file:" + resourceID);

    FileInfo evalFile = Script.ResolveFilename(evalID);
    FileInfo resourceFile = Script.ResolveFilename(resourceID);
    FileInfo outputFile = Analyst.Script.ResolveFilename(outputID);

    var m = (IMLMethod)EncogDirectoryPersistence.LoadObject(resourceFile);
    if (!(m is IMLRegression))
    {
        throw new AnalystError("The evaluate raw command can only be used with regression.");
    }
    var method = (IMLRegression)m;

    bool headers = Script.ExpectInputHeaders(evalID);

    var eval = new AnalystEvaluateRawCSV { Script = Script };
    Analyst.CurrentQuantTask = eval;
    eval.Report = new AnalystReportBridge(Analyst);
    eval.Analyze(Analyst, evalFile, headers,
                 Prop.GetPropertyCSVFormat(ScriptProperties.SetupConfigCSVFormat));
    eval.Process(outputFile, method);
    Analyst.CurrentQuantTask = null;
    return eval.ShouldStop();
}
public void Evaluate(FileInfo networkFile, FileInfo analystFile, FileInfo EvaluationFile)
{
    var network = EncogDirectoryPersistence.LoadObject(networkFile) as BasicNetwork;
    var analyst = new EncogAnalyst();
    analyst.Load(analystFile);

    var evaluationSet = EncogUtility.LoadCSV2Memory(EvaluationFile.ToString(),
                                                    network.InputCount, network.OutputCount,
                                                    true, CSVFormat.English, false);

    int count = 0;
    int correctCount = 0;
    foreach (var item in evaluationSet)
    {
        var sepal_l = analyst.Script.Normalize.NormalizedFields[0].DeNormalize(item.Input[0]);
        var sepal_w = analyst.Script.Normalize.NormalizedFields[1].DeNormalize(item.Input[1]);
        var petal_l = analyst.Script.Normalize.NormalizedFields[2].DeNormalize(item.Input[2]);
        var petal_w = analyst.Script.Normalize.NormalizedFields[3].DeNormalize(item.Input[3]);

        int classCount = analyst.Script.Normalize.NormalizedFields[4].Classes.Count;
        double normalizationHigh = analyst.Script.Normalize.NormalizedFields[4].NormalizedHigh;
        double normalizationLow = analyst.Script.Normalize.NormalizedFields[4].NormalizedLow;

        var output = network.Compute(item.Input);
        var resulter = new Equilateral(classCount, normalizationHigh, normalizationLow);

        var predictedClassInt = resulter.Decode(output);
        var predictedClass = analyst.Script.Normalize.NormalizedFields[4].Classes[predictedClassInt].Name;

        var idealClassInt = resulter.Decode(item.Ideal);
        var idealClass = analyst.Script.Normalize.NormalizedFields[4].Classes[idealClassInt].Name;

        if (predictedClassInt == idealClassInt)
        {
            ++correctCount;
        }

        Console.WriteLine($"Count: {++count} | Ideal: {idealClass} Predicted:{predictedClass}");
    }

    Console.WriteLine($"Total test count: {count}");
    Console.WriteLine($"Total correct test count: {correctCount}");
    Console.WriteLine($"% Success: {(correctCount * 100.0) / count}");
}
public void Train(FileInfo networkFile, FileInfo trainingDataFile)
{
    var network = EncogDirectoryPersistence.LoadObject(networkFile) as BasicNetwork;
    var trainingSet = EncogUtility.LoadCSV2Memory(trainingDataFile.ToString(),
                                                  network.InputCount, network.OutputCount,
                                                  true, CSVFormat.English, false);
    var trainer = new ResilientPropagation(network, trainingSet);

    int iter = 1;
    do
    {
        trainer.Iteration();
        Console.WriteLine($"\tIteration: {iter++} | Error: {trainer.Error}");
    } while (trainer.Error > 0.01);

    EncogDirectoryPersistence.SaveObject(networkFile, network);
}
/// <summary>
/// Obtain the ML method.
/// </summary>
/// <returns>The method.</returns>
private IMLMethod ObtainMethod()
{
    String resourceID = Prop.GetPropertyString(ScriptProperties.MlConfigMachineLearningFile);
    FileInfo resourceFile = Script.ResolveFilename(resourceID);

    object method = EncogDirectoryPersistence.LoadObject(resourceFile);

    if (!(method is IMLMethod))
    {
        throw new AnalystError(
            "The object to be trained must be an instance of MLMethod. "
            + method.GetType().Name);
    }

    return (IMLMethod)method;
}
/// <summary>
/// Evaluates the trained neural network against the test set created by the
/// Segregate method and normalized by the Normalization method.
/// </summary>
private static void Evaluate()
{
    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkClassificationFile);

    var analyst = new EncogAnalyst();
    analyst.Load(Config.AnalystClassificationFile.ToString());

    var evaluationSet = EncogUtility.LoadCSV2Memory(Config.NormalizedEvaluateClassificationFile.ToString(),
                                                    network.InputCount, network.OutputCount,
                                                    true, CSVFormat.English, false);

    int count = 0;
    int CorrectCount = 0;
    foreach (var item in evaluationSet)
    {
        count++;
        var output = network.Compute(item.Input);

        var sepal_l = analyst.Script.Normalize.NormalizedFields[0].DeNormalize(item.Input[0]);
        var sepal_w = analyst.Script.Normalize.NormalizedFields[1].DeNormalize(item.Input[1]);
        var petal_l = analyst.Script.Normalize.NormalizedFields[2].DeNormalize(item.Input[2]);
        var petal_w = analyst.Script.Normalize.NormalizedFields[3].DeNormalize(item.Input[3]);

        int classCount = analyst.Script.Normalize.NormalizedFields[4].Classes.Count;
        double normalizationHigh = analyst.Script.Normalize.NormalizedFields[4].NormalizedHigh;
        double normalizationLow = analyst.Script.Normalize.NormalizedFields[4].NormalizedLow;

        var eq = new Encog.MathUtil.Equilateral(classCount, normalizationHigh, normalizationLow);
        var predictedClassInt = eq.Decode(output);
        var predictedClass = analyst.Script.Normalize.NormalizedFields[4].Classes[predictedClassInt].Name;
        var idealClassInt = eq.Decode(item.Ideal);
        var idealClass = analyst.Script.Normalize.NormalizedFields[4].Classes[idealClassInt].Name;

        if (predictedClassInt == idealClassInt)
        {
            CorrectCount++;
        }

        Console.WriteLine("Count :{0} Properties [{1},{2},{3},{4}] ,Ideal : {5} Predicted : {6} ",
                          count, sepal_l, sepal_w, petal_l, petal_w, idealClass, predictedClass);
    }

    Console.WriteLine("Item count: {0}", count);
    Console.WriteLine("Correct count: {0}", CorrectCount);
    Console.WriteLine("Accuracy (%): {0}", (CorrectCount * 100.0) / count);
}
private void EvaluateCommand()
{
    String methodFile = _cmd.Args[0];
    String trainingFile = _cmd.Args[1];
    String outputFile = _cmd.Args[2];

    var method = (IMLRegression)EncogDirectoryPersistence.LoadObject(new FileInfo(methodFile));

    bool headers = _cmd.PromptBoolean("headers", true);
    AnalystFileFormat format1 =
        ConvertStringConst.String2AnalystFileFormat(_cmd.PromptString("format", "decpnt|comma"));
    CSVFormat format = ConvertStringConst.ConvertToCSVFormat(format1);

    var e = new EvaluateRawCSV { Report = new ConsoleStatusReportable() };
    e.Analyze(method, new FileInfo(trainingFile), headers, format);
    e.Process(new FileInfo(outputFile), method);
    Console.WriteLine(@"Done evaluating file.");
}
private void TrainCommand()
{
    String methodFile = _cmd.Args[0];
    String trainingFile = _cmd.Args[1];

    String type = _cmd.PromptString("type", "rprop");
    String args = _cmd.PromptString("args", "");
    double maxError = _cmd.PromptDouble("maxError", 0.01);

    var dataSet = new BufferedMLDataSet(trainingFile);
    var method = (IMLMethod)EncogDirectoryPersistence.LoadObject(new FileInfo(methodFile));

    var factory = new MLTrainFactory();
    IMLTrain train = factory.Create(method, dataSet, type, args);

    _sw.Start();
    EncogUtility.TrainToError(train, maxError);

    Console.WriteLine(@"Saving machine learning method");
    EncogDirectoryPersistence.SaveObject(new FileInfo(methodFile), method);
}
private static void Evaluate()
{
    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkFile);
    var analyst = new EncogAnalyst();
    analyst.Load(Config.AnalystFile.ToString());

    var evaluationSet = EncogUtility.LoadCSV2Memory(Config.NormalizedEvaluateFile.ToString(),
                                                    network.InputCount, network.OutputCount,
                                                    true, CSVFormat.English, false);

    var count = 0;
    var correctCount = 0;
    foreach (var item in evaluationSet)
    {
        count++;
        var output = network.Compute(item.Input);

        var analystNormalize = analyst.Script.Normalize;
        var normalizedFields = analystNormalize.NormalizedFields;
        var sourceElement = normalizedFields[0].DeNormalize(item.Input[0]);
        var destinationElement = normalizedFields[1].DeNormalize(item.Input[1]);

        var classField = normalizedFields[2];
        var classCount = classField.Classes.Count;
        var normalizationHigh = classField.NormalizedHigh;
        var normalizationLow = classField.NormalizedLow;

        var eq = new Encog.MathUtil.Equilateral(classCount, normalizationHigh, normalizationLow);
        var predictedClassInt = eq.Decode(output);
        var predictedClass = classField.Classes[predictedClassInt].Name;
        // Decode the ideal (expected) class from the ideal data, not from the
        // network output, so the comparison below is meaningful.
        var idealClassInt = eq.Decode(item.Ideal);
        var idealClass = classField.Classes[idealClassInt].Name;

        if (predictedClassInt == idealClassInt)
        {
            correctCount++;
        }

        Console.WriteLine("Count :{0} Properties [{1},{2}] ,Ideal : {3} Predicted : {4}",
                          count, sourceElement, destinationElement, idealClass, predictedClass);
    }

    Console.WriteLine("Total Test Count : {0}", count);
    Console.WriteLine("Total Correct Predicted Count : {0}", correctCount);
    Console.WriteLine("% Success : {0}", (correctCount * 100.0) / count);
}
public void Train(bool useGui)
{
    // load, or create the neural network
    BasicNetwork network;

    if (!_config.TrainedNetworkFile.Exists)
    {
        throw new EncogError(@"Can't find neural network file, please generate data");
    }

    network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(_config.TrainedNetworkFile);

    // convert training data
    Console.WriteLine(@"Converting training file to binary");
    EncogUtility.ConvertCSV2Binary(
        _config.NormalizedDataFile.ToString(),
        CSVFormat.English,
        _config.BinaryFile.ToString(),
        network.InputCount,
        network.OutputCount,
        false, false);

    var trainingSet = new BufferedMLDataSet(_config.BinaryFile.ToString());

    if (useGui)
    {
        EncogUtility.TrainDialog(network, trainingSet);
    }
    else
    {
        EncogUtility.TrainConsole(network, trainingSet, _config.TrainingMinutes);
    }

    Console.WriteLine(@"Training complete, saving network...");
    EncogDirectoryPersistence.SaveObject(_config.TrainedNetworkFile, network);
}
/// <see cref="INetwork.TrainNetwork"/>
public INetwork TrainNetwork()
{
    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(TrainedNetworkFile);
    var trainingSet = EncogUtility.LoadCSV2Memory(NormalizedTrainingFile.ToString(),
                                                  network.InputCount, network.OutputCount,
                                                  true, CSVFormat.English, false);
    var train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;
    do
    {
        train.Iteration();
        epoch++;
        // Keep training while the error is still above the target rate and the
        // epoch cap has not been reached.
    } while (train.Error > Rate && epoch < MaxEpochs);

    Error = train.Error;
    EncogDirectoryPersistence.SaveObject(TrainedNetworkFile, network);
    return this;
}
public Form1()
{
    InitializeComponent();

    chart1.ChartAreas[0].AxisX.MinorGrid.Enabled = false;
    chart1.ChartAreas[0].AxisX.MajorGrid.Enabled = false;
    chart1.ChartAreas[0].AxisY.MinorGrid.Enabled = false;
    chart1.ChartAreas[0].AxisY.MajorGrid.Enabled = false;

    string ruta_red = "C:\\Users\\soyal\\OneDrive - UNIVERSIDAD NACIONAL AUTÓNOMA DE MÉXICO\\Documentos\\2020-2\\InteligenciaArtificial\\Encog\\Train.txt";
    Red = (BasicNetwork)EncogDirectoryPersistence.LoadObject(new FileInfo(ruta_red));

    string ruta_datos = "C:\\Users\\soyal\\OneDrive - UNIVERSIDAD NACIONAL AUTÓNOMA DE MÉXICO\\Documentos\\2020-2\\InteligenciaArtificial\\Encog\\DatosEntrenamiento.csv";
    StreamReader lector = new StreamReader(ruta_datos);
    var lineas = new List<string[]>();
    string[] Linea;

    while (!lector.EndOfStream)
    {
        Linea = lector.ReadLine().Split(',');
        lineas.Add(Linea);
    }

    Input = new double[lineas.Count][];
    Output = new double[lineas.Count][];
    for (int i = 0; i < lineas.Count; i++)
    {
        Input[i] = new double[2];
        Output[i] = new double[1];
        Input[i][0] = Convert.ToDouble(lineas[i][0]);
        Input[i][1] = Convert.ToDouble(lineas[i][1]);
        Output[i][0] = Convert.ToDouble(lineas[i][2]);
    }

    for (int i = 0; i < lineas.Count / 2; i++)
    {
        chart1.Series["Clase1"].Points.AddXY(Input[i][0], Input[i][1]);
    }
    for (int i = lineas.Count / 2; i < lineas.Count; i++)
    {
        chart1.Series["Clase2"].Points.AddXY(Input[i][0], Input[i][1]);
    }
}
public void EvaluateNetwork(FileInfo trainedNetwork, FileInfo analystFile, FileInfo normalisedTestFile, FileInfo finalResultsFile)
{
    try
    {
        var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(trainedNetwork);
        var analyst = new EncogAnalyst();
        analyst.Load(analystFile.ToString());

        var evaluationSet = EncogUtility.LoadCSV2Memory(normalisedTestFile.ToString(),
                                                        network.InputCount, network.OutputCount,
                                                        true, CSVFormat.English, false);

        using (var file = new StreamWriter(finalResultsFile.ToString()))
        {
            foreach (var item in evaluationSet)
            {
                var normalizedActualoutput = (BasicMLData)network.Compute(item.Input);
                //var actualoutput = analyst.Script.Normalize.NormalizedFields[11].DeNormalize(normalizedActualoutput.Data[0]);
                //var idealOutput = analyst.Script.Normalize.NormalizedFields[11].DeNormalize(item.Ideal[0]);

                int classCount = analyst.Script.Normalize.NormalizedFields[11].Classes.Count;
                double normalizationHigh = analyst.Script.Normalize.NormalizedFields[11].NormalizedHigh;
                double normalizationLow = analyst.Script.Normalize.NormalizedFields[11].NormalizedLow;

                var eq = new Encog.MathUtil.Equilateral(classCount, normalizationHigh, normalizationLow);
                var predictedClassInt = eq.Decode(normalizedActualoutput);
                var idealClassInt = eq.Decode(item.Ideal);

                // Write to file
                var resultLine = idealClassInt.ToString() + "," + predictedClassInt.ToString();
                file.WriteLine(resultLine);
                Console.WriteLine("Ideal : {0}, Actual : {1}", idealClassInt, predictedClassInt);
            }
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        throw;
    }
}
public static void Run()
{
    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.NetworkFile);
    var trainingSet = EncogUtility.LoadEGB2Memory(Config.TrainingFile);

    while (true)
    {
        Propagation train = new ResilientPropagation(network, trainingSet)
        {
            ThreadCount = 0,
            FixFlatSpot = false
        };

        EncogUtility.TrainConsole(train, network, trainingSet, TimeSpan.FromMinutes(10).TotalSeconds);

        Console.WriteLine("Finished. Saving network...");
        EncogDirectoryPersistence.SaveObject(Config.NetworkFile, network);
        Console.WriteLine(@"Network saved.");
    }
}