/// <summary>
/// Entry point for the forest cover example. Reads the data directory and a
/// mode keyword from the command line and dispatches to the generate, train,
/// traingui, or evaluate step.
/// </summary>
/// <param name="app">Example harness supplying the command-line arguments.</param>
public void Execute(IExampleInterface app)
{
    // Both the data directory (Args[0]) and the mode (Args[1]) are required.
    // The original "< 1" guard let Args[1] below throw IndexOutOfRangeException
    // when only the data directory was supplied.
    if (app.Args.Length < 2)
    {
        Console.WriteLine(@"Usage: ForestCover [data directory] [generate/train/traingui/evaluate] [e/o]");
    }
    else
    {
        try
        {
            var config = new ForestConfig(new FileInfo(app.Args[0]));
            // Keyword matching is non-linguistic, so use ordinal comparison
            // rather than the culture-sensitive String.Compare(..., true).
            if (string.Equals(app.Args[1], "generate", StringComparison.OrdinalIgnoreCase))
            {
                if (app.Args.Length < 3)
                {
                    Console.WriteLine(
                        @"When using generate, you must specify an 'e' or an 'o' as the second parameter.");
                }
                else
                {
                    // 'e' selects equilateral output encoding; anything else means one-of-n.
                    bool useOneOf = !app.Args[2].Equals("e", StringComparison.OrdinalIgnoreCase);
                    Generate(config, useOneOf);
                }
            }
            else if (string.Equals(app.Args[1], "train", StringComparison.OrdinalIgnoreCase))
            {
                Train(config, false);
            }
            else if (string.Equals(app.Args[1], "traingui", StringComparison.OrdinalIgnoreCase))
            {
                Train(config, true);
            }
            else if (string.Equals(app.Args[1], "evaluate", StringComparison.OrdinalIgnoreCase))
            {
                Evaluate(config);
            }
        }
        finally
        {
            // Always release Encog resources, even if a step throws.
            EncogFramework.Instance.Shutdown();
        }
    }
}
/// <summary>
/// Runs the three data-generation steps, persists the resulting
/// normalization object, then builds and saves an untrained
/// feed-forward network sized to the normalized data.
/// </summary>
/// <param name="config">File locations and network parameters for the example.</param>
/// <param name="useOneOf">True for one-of-n output encoding; false for equilateral.</param>
public static void Generate(ForestConfig config, bool useOneOf)
{
    var generator = new GenerateData(config);
    generator.Step1();
    generator.Step2();
    DataNormalization normalization = generator.Step3(useOneOf);

    // Persist the normalization so later evaluation reuses the same mapping.
    SerializeObject.Save(config.NormalizeFile.ToString(), normalization);

    // Create an untrained network matching the normalized input/output sizes.
    int inputCount = normalization.GetNetworkInputLayerSize();
    int outputCount = normalization.GetNetworkOutputLayerSize();
    BasicNetwork network = EncogUtility.SimpleFeedForward(
        inputCount, config.HiddenCount, 0, outputCount, true);
    EncogDirectoryPersistence.SaveObject(config.TrainedNetworkFile, network);
}
/// <summary>Creates an evaluator bound to the given configuration.</summary>
/// <param name="config">Configuration describing the example's file locations.</param>
public Evaluate(ForestConfig config) => _config = config;
/// <summary>Creates a trainer bound to the given configuration.</summary>
/// <param name="config">Configuration describing the example's file locations.</param>
public TrainNetwork(ForestConfig config) => _config = config;
/// <summary>
/// Entry point for the forest cover example. Reads the data directory and a
/// mode keyword from the command line and dispatches to the generate, train,
/// traingui, or evaluate step.
/// </summary>
/// <param name="app">Example harness supplying the command-line arguments.</param>
public void Execute(IExampleInterface app)
{
    // Both the data directory (Args[0]) and the mode (Args[1]) are required.
    // The original "< 1" guard let Args[1] below throw IndexOutOfRangeException
    // when only the data directory was supplied.
    if (app.Args.Length < 2)
    {
        Console.WriteLine(@"Usage: ForestCover [data directory] [generate/train/traingui/evaluate] [e/o]");
    }
    else
    {
        try
        {
            var config = new ForestConfig(new FileInfo(app.Args[0]));
            // Keyword matching is non-linguistic, so use ordinal comparison
            // rather than the culture-sensitive String.Compare(..., true).
            if (string.Equals(app.Args[1], "generate", StringComparison.OrdinalIgnoreCase))
            {
                if (app.Args.Length < 3)
                {
                    Console.WriteLine(
                        @"When using generate, you must specify an 'e' or an 'o' as the second parameter.");
                }
                else
                {
                    // 'e' selects equilateral output encoding; anything else means one-of-n.
                    bool useOneOf = !app.Args[2].Equals("e", StringComparison.OrdinalIgnoreCase);
                    Generate(config, useOneOf);
                }
            }
            else if (string.Equals(app.Args[1], "train", StringComparison.OrdinalIgnoreCase))
            {
                Train(config, false);
            }
            else if (string.Equals(app.Args[1], "traingui", StringComparison.OrdinalIgnoreCase))
            {
                Train(config, true);
            }
            else if (string.Equals(app.Args[1], "evaluate", StringComparison.OrdinalIgnoreCase))
            {
                Evaluate(config);
            }
        }
        finally
        {
            // Always release Encog resources, even if a step throws.
            EncogFramework.Instance.Shutdown();
        }
    }
}
/// <summary>Builds an evaluator for the configuration and runs the network evaluation.</summary>
/// <param name="config">Configuration describing the example's file locations.</param>
public static void Evaluate(ForestConfig config)
{
    new Evaluate(config).EvaluateNetwork();
}
/// <summary>Builds a trainer for the configuration and runs training.</summary>
/// <param name="config">Configuration describing the example's file locations.</param>
/// <param name="useGui">True to train with the GUI workbench; false for console training.</param>
public static void Train(ForestConfig config, bool useGui)
{
    new TrainNetwork(config).Train(useGui);
}
/// <summary>
/// Runs the three data-generation steps, persists the resulting
/// normalization object, then builds and saves an untrained
/// feed-forward network sized to the normalized data.
/// </summary>
/// <param name="config">File locations and network parameters for the example.</param>
/// <param name="useOneOf">True for one-of-n output encoding; false for equilateral.</param>
public static void Generate(ForestConfig config, bool useOneOf)
{
    var generator = new GenerateData(config);
    generator.Step1();
    generator.Step2();
    DataNormalization normalization = generator.Step3(useOneOf);

    // Persist the normalization so later evaluation reuses the same mapping.
    SerializeObject.Save(config.NormalizeFile.ToString(), normalization);

    // Create an untrained network matching the normalized input/output sizes.
    int inputCount = normalization.GetNetworkInputLayerSize();
    int outputCount = normalization.GetNetworkOutputLayerSize();
    BasicNetwork network = EncogUtility.SimpleFeedForward(
        inputCount, config.HiddenCount, 0, outputCount, true);
    EncogDirectoryPersistence.SaveObject(config.TrainedNetworkFile, network);
}
/// <summary>Creates a data generator bound to the given configuration.</summary>
/// <param name="config">Configuration describing the example's file locations.</param>
public GenerateData(ForestConfig config) => _config = config;