/// <summary>
/// Evaluates the trained regression network against the normalized evaluation
/// set (created by the Segregate step and normalized by the Normalization step),
/// writing "ideal,actual" pairs to the validation result file.
/// </summary>
private static void Evaluate()
{
    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkRegressionFile);

    var analyst = new EncogAnalyst();
    analyst.Load(Config.AnalystRegressionFile.ToString());

    var evaluationSet = EncogUtility.LoadCSV2Memory(
        Config.NormalizedEvaluateRegressionFile.ToString(),
        network.InputCount, network.OutputCount, true, CSVFormat.English, false);

    using (var file = new System.IO.StreamWriter(Config.ValidationRegressionResult.ToString()))
    {
        // Field index 8 holds the regression target; de-normalize both the
        // prediction and the ideal back to real-world units with its stats.
        var targetField = analyst.Script.Normalize.NormalizedFields[8];
        foreach (var pair in evaluationSet)
        {
            var rawOutput = (BasicMLData)network.Compute(pair.Input);
            var actual = targetField.DeNormalize(rawOutput.Data[0]);
            var ideal = targetField.DeNormalize(pair.Ideal[0]);

            //Write to File
            file.WriteLine(ideal.ToString() + "," + actual.ToString());
            Console.WriteLine("Ideal : {0}, Actual : {1}", ideal, actual);
        }
    }
}
/// <summary>
/// Runs the Encog analyst wizard on the raw CSV, normalizes it to a temporary
/// file and loads the result into an in-memory data set.
/// </summary>
/// <param name="fileInfo">Raw CSV file (headers expected, decimal-point format).</param>
/// <param name="problemType">Classification or Regression analyst goal.</param>
/// <param name="normalizationType">Normalization applied to the class field (classification only).</param>
/// <param name="randomize">Whether the loaded rows are shuffled.</param>
/// <returns>The normalized, in-memory data set.</returns>
public static IMLDataSet LoadAndNormalizeData(FileInfo fileInfo, AnalystGoal problemType, NormalizationAction normalizationType, bool randomize = true)
{
    var analyst = new EncogAnalyst();
    var wizard = new AnalystWizard(analyst);
    wizard.Goal = problemType;
    wizard.Wizard(fileInfo, true, AnalystFileFormat.DecpntComma);

    var fields = analyst.Script.Normalize.NormalizedFields;
    if (problemType == AnalystGoal.Classification)
        fields[fields.Count - 1].Action = normalizationType;

    var norm = new AnalystNormalizeCSV();
    norm.Analyze(fileInfo, true, CSVFormat.DecimalPoint, analyst);

    // Fix: ensure the "temp" directory exists before Encog tries to write into it;
    // previously this threw DirectoryNotFoundException on a clean working dir.
    var normalizedDataFileInfo = new FileInfo("temp/temp.csv");
    if (normalizedDataFileInfo.Directory != null && !normalizedDataFileInfo.Directory.Exists)
        normalizedDataFileInfo.Directory.Create();

    try
    {
        norm.Normalize(normalizedDataFileInfo);

        var inputNeurons = fields.Count - 1;
        int outputNeurons;
        if (problemType == AnalystGoal.Classification)
            // Equilateral encoding uses one fewer output neuron than classes.
            outputNeurons = fields.Last().Classes.Count - (normalizationType == NormalizationAction.Equilateral ? 1 : 0);
        else
            outputNeurons = fields.Count - inputNeurons; // regression: the single target column

        return CSVHelper.LoadCSVToDataSet(normalizedDataFileInfo, inputNeurons, outputNeurons, randomize);
    }
    finally
    {
        // Fix: delete the temp file even when LoadCSVToDataSet throws.
        normalizedDataFileInfo.Delete();
    }
}
/// <summary>
/// Regression end-to-end test: runs the "task-full" analyst task on the simple
/// data set and verifies each prediction stays within 1.5 of the ideal value.
/// </summary>
public void TestRegression()
{
    FileInfo raw = TEMP_DIR.CreateFile("simple.csv");
    FileInfo ega = TEMP_DIR.CreateFile("simple.ega");
    FileInfo output = TEMP_DIR.CreateFile("simple_output.csv");
    FileUtil.CopyResource("Encog.Resources.simple.csv", raw);
    FileUtil.CopyResource("Encog.Resources.simple-r.ega", ega);

    EncogAnalyst analyst = new EncogAnalyst();
    analyst.Load(ega);
    analyst.ExecuteTask("task-full");

    ReadCSV reader = new ReadCSV(output.ToString(), true, CSVFormat.English);
    while (reader.Next())
    {
        // Column 2 holds the ideal value, column 4 the network prediction.
        double diff = Math.Abs(reader.GetDouble(2) - reader.GetDouble(4));
        Assert.IsTrue(diff < 1.5);
    }

    Assert.AreEqual(4, analyst.Script.Fields.Length);
    Assert.AreEqual(3, analyst.Script.Fields[3].ClassMembers.Count);
    reader.Close();
}
/// <summary>
/// End-to-end wizard test on the iris data set: runs the full analyst task,
/// round-trips the script through save/load, produces an HTML report, and
/// checks the gathered field statistics.
/// </summary>
public void TestWizard()
{
    FileInfo rawFile = TEMP_DIR.CreateFile("iris_raw.csv");
    FileUtil.CopyResource("Encog.Resources.iris.csv", rawFile);
    FileInfo analystFile = TEMP_DIR.CreateFile("iris.ega");

    EncogAnalyst encog = new EncogAnalyst();
    AnalystWizard wiz = new AnalystWizard(encog);
    wiz.Goal = AnalystGoal.Classification;
    wiz.Wizard(rawFile, true, AnalystFileFormat.DecpntComma);
    encog.ExecuteTask("task-full");
    encog.Save(analystFile);
    encog.Load(analystFile);

    AnalystReport report = new AnalystReport(encog);
    report.ProduceReport(TEMP_DIR.CreateFile("report.html"));

    Assert.AreEqual(5, encog.Script.Normalize.NormalizedFields.Count);
    Assert.AreEqual(4.3, encog.Script.Fields[0].Min, 0.001);
    Assert.AreEqual(7.9, encog.Script.Fields[0].Max, 0.001);
    // NOTE(review): the sibling copy of this test expects 5.84333 (the true
    // iris sepal-length mean); confirm which value this analyst version yields.
    Assert.AreEqual(5.8483221477, encog.Script.Fields[0].Mean, 0.001);
    // Fix: Assert.AreEqual takes (expected, actual) — arguments were reversed,
    // which produces misleading failure messages.
    Assert.AreEqual(false, encog.Script.Fields[0].Class);
    Assert.AreEqual(true, encog.Script.Fields[0].Real);
    Assert.AreEqual(false, encog.Script.Fields[0].Integer);
    Assert.AreEqual(true, encog.Script.Fields[0].Complete);
    Assert.AreEqual(-3.38833, encog.Script.Normalize.NormalizedFields[0].Normalize(0.001), 0.001);
}
/// <summary>
/// Click handler: lets the user choose a CSV training file, normalizes it to a
/// matching .ega path, then builds a network sized from the analyst's unique
/// input/output field counts.
/// </summary>
private void getLearningSet(object sender, RoutedEventArgs e)
{
    var analyst = new EncogAnalyst();

    // Configure the file picker for CSV files only.
    Microsoft.Win32.OpenFileDialog dlg = new Microsoft.Win32.OpenFileDialog();
    dlg.DefaultExt = ".csv";
    dlg.Filter = "CSV Files |*.csv";

    // ShowDialog returns true only when the user confirmed a selection.
    if (dlg.ShowDialog() != true)
    {
        return;
    }

    string csvPath = dlg.FileName;
    string egaPath = System.IO.Path.ChangeExtension(csvPath, "ega");
    dataNormalization(csvPath, egaPath);

    // dataNormalization persists its statistics to stats.ega; reload them here.
    analyst.Load(new FileInfo("stats.ega"));
    int input = analyst.DetermineUniqueInputFieldCount();
    int output = analyst.DetermineUniqueOutputFieldCount();
    networkGenerate(input, output);
}
/// <summary>
/// Example entry point: normalizes [input file] into [target file] via the
/// analyst wizard; prints usage when the two file arguments are missing.
/// </summary>
public void Execute(IExampleInterface app)
{
    if (app.Args.Length != 2)
    {
        Console.WriteLine(@"Note: This example assumes that headers are present in the CSV files.");
        Console.WriteLine(@"NormalizeFile [input file] [target file]");
        return;
    }

    var sourceFile = new FileInfo(app.Args[0]);
    var targetFile = new FileInfo(app.Args[1]);

    // Let the wizard infer field types and normalization actions.
    var analyst = new EncogAnalyst();
    new AnalystWizard(analyst).Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);
    DumpFieldInfo(analyst);

    var norm = new AnalystNormalizeCSV();
    norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
    norm.ProduceOutputHeaders = true;
    norm.Normalize(targetFile);

    EncogFramework.Instance.Shutdown();
}
/// <summary>
/// Records the input file, header/format settings and analyst, performs the
/// basic row counts, then caches the heading count, output field count, CSV
/// headers and time-series helper.
/// (Decompiled code: the goto labels and always-true/always-false guards were
/// decompiler noise and have been flattened; behavior is preserved.)
/// </summary>
public void Analyze(EncogAnalyst theAnalyst, FileInfo inputFile, bool headers, CSVFormat format)
{
    base.InputFilename = inputFile;
    base.ExpectInputHeaders = headers;
    base.InputFormat = format;
    base.Analyzed = true;
    this._x554f16462d8d4675 = theAnalyst;
    base.PerformBasicCounts();
    // Cache column/heading metadata derived from the analyst script.
    this._x146688677da5adf5 = base.InputHeadings.Length;
    this._x1402a42b31a31090 = this._x554f16462d8d4675.DetermineOutputFieldCount();
    this._xc5416b6511261016 = new CSVHeaders(base.InputHeadings);
    this._x7acb8518c8ed6133 = new TimeSeriesUtil(this._x554f16462d8d4675, false, this._xc5416b6511261016.Headers);
}
/// <summary>
/// Constructs the analyst wizard with its defaults (feed-forward method,
/// classification goal, [-1,1] normalization range, discard-missing handler).
/// (Decompiled code: the dead gotos and constant guards — 15 == 0, 8 != 0,
/// 1 != 0 — have been removed; behavior is preserved.)
/// </summary>
public AnalystWizard(EncogAnalyst theAnalyst)
{
    this._x7047063a9bee4054 = true;
    this._xc24b506a94383a44 = false;
    this._x34231b3d9a1591be = true;
    this._x9b10ace6509508c0 = NormalizeRange.NegOne2One;
    this._x554f16462d8d4675 = theAnalyst;
    this._x594135906c55045c = this._x554f16462d8d4675.Script;
    this._xa24f4208aa2278f4 = WizardMethodType.FeedForward;
    this._x0768e2edc97194de = "";
    this._x29c8e5bee3cb25f8 = AnalystGoal.Classification;
    this._xb6540cd895237850 = 0;
    this._x654428e3563552e3 = 0;
    this._x0236ea04f9fa4aaa = false;
    this._x771edacf1be2c386 = new DiscardMissing();
}
/// <summary>
/// Runs the full regression analyst task against the simple data set and
/// asserts that the produced predictions track the ideal column closely.
/// </summary>
public void TestRegression()
{
    var rawFile = TEMP_DIR.CreateFile("simple.csv");
    var egaFile = TEMP_DIR.CreateFile("simple.ega");
    var outputFile = TEMP_DIR.CreateFile("simple_output.csv");
    FileUtil.CopyResource("Encog.Resources.simple.csv", rawFile);
    FileUtil.CopyResource("Encog.Resources.simple-r.ega", egaFile);

    var analyst = new EncogAnalyst();
    analyst.Load(egaFile);
    analyst.ExecuteTask("task-full");

    // Walk the output file row by row; ideal is column 2, predicted column 4.
    var csv = new ReadCSV(outputFile.ToString(), true, CSVFormat.English);
    while (csv.Next())
    {
        Assert.IsTrue(Math.Abs(csv.GetDouble(2) - csv.GetDouble(4)) < 1.5);
    }

    Assert.AreEqual(4, analyst.Script.Fields.Length);
    Assert.AreEqual(3, analyst.Script.Fields[3].ClassMembers.Count);
    csv.Close();
}
/// <summary>
/// End-to-end wizard test on the iris data set: executes the full analyst
/// task, persists and reloads the script, produces a report, then validates
/// the collected field statistics (min/max/mean and field-type flags).
/// </summary>
public void TestWizard()
{
    FileInfo rawFile = TEMP_DIR.CreateFile("iris_raw.csv");
    FileUtil.CopyResource("Encog.Resources.iris.csv", rawFile);
    FileInfo analystFile = TEMP_DIR.CreateFile("iris.ega");

    EncogAnalyst encog = new EncogAnalyst();
    AnalystWizard wiz = new AnalystWizard(encog);
    wiz.Goal = AnalystGoal.Classification;
    wiz.Wizard(rawFile, true, AnalystFileFormat.DecpntComma);
    encog.ExecuteTask("task-full");
    encog.Save(analystFile);
    encog.Load(analystFile);

    AnalystReport report = new AnalystReport(encog);
    report.ProduceReport(TEMP_DIR.CreateFile("report.html"));

    Assert.AreEqual(5, encog.Script.Normalize.NormalizedFields.Count);
    Assert.AreEqual(4.3, encog.Script.Fields[0].Min, 0.001);
    Assert.AreEqual(7.9, encog.Script.Fields[0].Max, 0.001);
    Assert.AreEqual(5.84333, encog.Script.Fields[0].Mean, 0.001);
    // Fix: Assert.AreEqual takes (expected, actual) — arguments were reversed,
    // which produces misleading failure messages.
    Assert.AreEqual(false, encog.Script.Fields[0].Class);
    Assert.AreEqual(true, encog.Script.Fields[0].Real);
    Assert.AreEqual(false, encog.Script.Fields[0].Integer);
    Assert.AreEqual(true, encog.Script.Fields[0].Complete);
    Assert.AreEqual(-3.38833, encog.Script.Normalize.NormalizedFields[0].Normalize(0.001), 0.001);
}
/// <summary>
/// Constructs a new CSVData object, needs a CSV file to open.
/// Loads every row into m_data (stripping double quotes), counts the
/// input/output nodes, and loads a sibling .ega analyst file when one exists.
/// </summary>
/// <param name="fileName">Full path to the CSV file</param>
public CSVData(string fileName)
{
    // Fix: 'using' guarantees the file handle is released even if a read throws;
    // previously the reader leaked on exception.
    using (var reader = new StreamReader(File.OpenRead(fileName)))
    {
        while (!reader.EndOfStream)
        {
            String line = reader.ReadLine();
            // Remove double quotes
            line = Regex.Replace(line, "\"", "");
            m_data.Add(line.Split(',').ToList());
        }
    }

    // Count the number of input and output nodes
    CountNodes();
    m_fileName = fileName;

    // Fix: fileName.Split('.')[0] truncated at the FIRST dot, breaking paths
    // like "C:\data.v2\iris.csv"; Path.ChangeExtension replaces only the
    // final extension.
    FileInfo analyst = new FileInfo(Path.ChangeExtension(fileName, "ega"));
    if (analyst.Exists)
    {
        m_analyst = new EncogAnalyst();
        m_analyst.Load(analyst);
    }
}
/// <summary>
/// Classification end-to-end test: after running "task-full" on the simple
/// data set, the predicted class column must equal the ideal class column.
/// </summary>
public void TestClassification()
{
    FileInfo rawFile = TEMP_DIR.CreateFile("simple.csv");
    FileInfo egaFile = TEMP_DIR.CreateFile("simple.ega");
    FileInfo outputFile = TEMP_DIR.CreateFile("simple_output.csv");
    FileUtil.CopyResource("Encog.Resources.simple.csv", rawFile);
    FileUtil.CopyResource("Encog.Resources.simple-c.ega", egaFile);

    EncogAnalyst analyst = new EncogAnalyst();
    analyst.AddAnalystListener(new ConsoleAnalystListener());
    analyst.Load(egaFile);
    analyst.ExecuteTask("task-full");

    // Column 3 holds the ideal class, column 4 the predicted class.
    ReadCSV csv = new ReadCSV(outputFile.ToString(), true, CSVFormat.English);
    while (csv.Next())
    {
        Assert.AreEqual(csv.Get(3), csv.Get(4));
    }

    Assert.AreEqual(4, analyst.Script.Fields.Length);
    Assert.AreEqual(3, analyst.Script.Fields[3].ClassMembers.Count);
    csv.Close();
}
/// <summary>
/// Runs the full classification analyst task over the simple data set and
/// checks every output row predicted the same class the ideal column holds.
/// </summary>
public void TestClassification()
{
    var raw = TEMP_DIR.CreateFile("simple.csv");
    var ega = TEMP_DIR.CreateFile("simple.ega");
    var output = TEMP_DIR.CreateFile("simple_output.csv");
    FileUtil.CopyResource("Encog.Resources.simple.csv", raw);
    FileUtil.CopyResource("Encog.Resources.simple-c.ega", ega);

    var analyst = new EncogAnalyst();
    analyst.AddAnalystListener(new ConsoleAnalystListener());
    analyst.Load(ega);
    analyst.ExecuteTask("task-full");

    var reader = new ReadCSV(output.ToString(), true, CSVFormat.English);
    while (reader.Next())
    {
        // Ideal class sits in column 3; the prediction in column 4.
        Assert.AreEqual(reader.Get(3), reader.Get(4));
    }

    Assert.AreEqual(4, analyst.Script.Fields.Length);
    Assert.AreEqual(3, analyst.Script.Fields[3].ClassMembers.Count);
    reader.Close();
}
/// <summary>
/// Supplies substitute value(s) for a missing field: the encoded mode class
/// for classification fields, otherwise the field's recorded mean.
/// </summary>
public double[] HandleMissing(EncogAnalyst analyst, AnalystField stat)
{
    if (!stat.Classify)
    {
        // Numeric field: fall back to the column mean gathered by the analyst.
        DataField field = analyst.Script.FindDataField(stat.Name);
        return new double[] { field.Mean };
    }

    // Class field: encode the most frequently observed class.
    return stat.Encode(stat.DetermineMode(analyst));
}
/// <summary>
/// Prepares a clean temp working area: copies the named resource to test.csv,
/// reserves test.ega for the analyst script, and creates a fresh analyst.
/// </summary>
public AnalystTestingUtility(String theBaseDataFile)
{
    // Start from an empty temp directory so prior runs cannot interfere.
    _tempDir.ClearContents();

    _rawFile = _tempDir.CreateFile("test.csv");
    FileUtil.CopyResource(theBaseDataFile, _rawFile);
    _analystFile = _tempDir.CreateFile("test.ega");

    BaseDataFile = theBaseDataFile;
    EncogAnalyst = new EncogAnalyst();
    FileFormat = AnalystFileFormat.DecpntComma;
}
/// <summary>
/// Sets up the testing fixture: wipes the temp directory, stages the resource
/// data as test.csv, and initializes the analyst with decimal-point/comma format.
/// </summary>
public AnalystTestingUtility(String theBaseDataFile)
{
    _tempDir.ClearContents();
    BaseDataFile = theBaseDataFile;

    // Stage the raw data and the (not yet written) analyst script file.
    _rawFile = _tempDir.CreateFile("test.csv");
    _analystFile = _tempDir.CreateFile("test.ega");
    FileUtil.CopyResource(theBaseDataFile, _rawFile);

    EncogAnalyst = new EncogAnalyst();
    FileFormat = AnalystFileFormat.DecpntComma;
}
/// <summary>
/// Runs the analyst wizard over the raw test file with the given goal and
/// method type, then round-trips the script through save/load to exercise
/// persistence.
/// </summary>
public void Wizard(AnalystGoal goal, WizardMethodType methodType, bool headers)
{
    EncogAnalyst.MaxIteration = MaxIterations;

    var wizard = new AnalystWizard(EncogAnalyst);
    wizard.Goal = goal;
    wizard.MethodType = methodType;
    wizard.EvidenceSegements = 3;
    wizard.Wizard(_rawFile, headers, FileFormat);

    EncogAnalyst.Save(_analystFile);
    EncogAnalyst.Load(_analystFile);
}
/// <summary>
/// Runs the analyst wizard over the source file and shows how a field's
/// normalization action can be overridden afterwards (Normalize for numeric
/// fields, Equilateral for enumerations).
/// </summary>
private static void Normalise()
{
    var analyst = new EncogAnalyst();
    var wizard = new AnalystWizard(analyst);
    wizard.Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);

    //for numerical vals:
    analyst.Script.Normalize.NormalizedFields[0].Action = Encog.Util.Arrayutil.NormalizationAction.Normalize;
    //for enumerations:
    analyst.Script.Normalize.NormalizedFields[0].Action = Encog.Util.Arrayutil.NormalizationAction.Equilateral;
}
/// <summary>
/// Builds normalization statistics from the base file, normalizes both the
/// training and evaluation files with the same analyst, and saves the script.
/// </summary>
public static void Normalise(FileOps fileOps)
{
    var analyst = new EncogAnalyst();
    var wizard = new AnalystWizard(analyst);
    wizard.Wizard(fileOps.BaseFile, true, AnalystFileFormat.DecpntComma);

    var normalizer = new AnalystNormalizeCSV { ProduceOutputHeaders = true };

    // Training data first, then the evaluation data with the same statistics.
    normalizer.Analyze(fileOps.TrainingFile, true, CSVFormat.English, analyst);
    normalizer.Normalize(fileOps.NormalisedTrainingFile);

    normalizer.Analyze(fileOps.EvaluationFile, true, CSVFormat.English, analyst);
    normalizer.Normalize(fileOps.NormalisedEvaluationFile);

    analyst.Save(fileOps.AnalystFile);
}
/// <summary>
/// Prints each normalized field's name, chosen action and actual low/high range.
/// </summary>
public static void DumpFieldInfo(EncogAnalyst analyst)
{
    Console.WriteLine(@"Fields found in file:");
    foreach (AnalystField field in analyst.Script.Normalize.NormalizedFields)
    {
        Console.WriteLine($"{field.Name},action={field.Action},min={field.ActualLow},max={field.ActualHigh}");
    }
}
/// <summary>
/// Normalizes f1 into f2 using the analyst wizard, persisting the gathered
/// statistics to stats.ega (then reloading them into the same analyst).
/// </summary>
private void dataNormalization(string f1, string f2)
{
    var sourceFile = new FileInfo(f1);
    var targetFile = new FileInfo(f2);

    // Let the wizard infer field metadata from the source CSV.
    var analyst = new EncogAnalyst();
    new AnalystWizard(analyst).Wizard(sourceFile, true, AnalystFileFormat.DecpntComma);

    var norm = new AnalystNormalizeCSV();
    norm.Analyze(sourceFile, true, CSVFormat.English, analyst);
    norm.ProduceOutputHeaders = true;
    norm.Normalize(targetFile);

    analyst.Save(new FileInfo("stats.ega"));
    analyst.Load(new FileInfo("stats.ega"));
}
/// <summary>
/// Records the input file/format settings, performs the basic counts, caches
/// the analyst's input/output counts, and validates that the CSV column count
/// matches either the input count or input+output count — otherwise throws
/// an <see cref="AnalystError"/>.
/// (Decompiled code: the goto-label maze built a single error message; it has
/// been flattened into one conditional throw with identical text.)
/// </summary>
public void Analyze(EncogAnalyst theAnalyst, FileInfo inputFile, bool headers, CSVFormat format)
{
    base.InputFilename = inputFile;
    base.ExpectInputHeaders = headers;
    base.InputFormat = format;
    this._x554f16462d8d4675 = theAnalyst;
    base.Analyzed = true;
    base.PerformBasicCounts();

    this._x43f451310e815b76 = this._x554f16462d8d4675.DetermineInputCount();
    this._x98cf41c6b0eaf6ab = this._x554f16462d8d4675.DetermineOutputCount();
    this._xb52d4a98fad404da = base.InputHeadings.Length - this._x43f451310e815b76;

    // The file must contain exactly the input columns, or input+output columns.
    if ((base.InputHeadings.Length != this._x43f451310e815b76)
        && (base.InputHeadings.Length != (this._x43f451310e815b76 + this._x98cf41c6b0eaf6ab)))
    {
        throw new AnalystError(string.Concat(
            "Invalid number of columns(",
            base.InputHeadings.Length,
            "), must match input(",
            this._x43f451310e815b76,
            ") count or input+output(",
            this._x43f451310e815b76 + this._x98cf41c6b0eaf6ab,
            ") count."));
    }
}
/// <summary>
/// Repeatedly executes "task-full" until the error drops to maxError or the
/// cycle budget runs out; fails the test when too many cycles were required.
/// </summary>
public void Process(double maxError)
{
    EncogAnalyst.AddAnalystListener(new ConsoleAnalystListener());

    int cycles = 0;
    double error;
    do
    {
        EncogAnalyst.ExecuteTask("task-full");
        error = CalculateError();
        cycles++;
        Debug.WriteLine(cycles + ": Error = " + error);
    } while (cycles <= MaxCycles && error > maxError);

    Assert.IsTrue(cycles <= MaxCycles, "Too many cycles to perform successful train.");
}
/// <summary>
/// Caches the analyst's lag/lead depths, derives the total buffer size
/// (lag + lead + 1), the column count (with or without outputs), and builds
/// an upper-cased heading-to-index map.
/// (Decompiled code: the no-op "if (-1 != 0) { }" guard was decompiler noise
/// and has been removed; behavior is preserved.)
/// </summary>
public TimeSeriesUtil(EncogAnalyst theAnalyst, bool includeOutput, IEnumerable<string> headings)
{
    this._x554f16462d8d4675 = theAnalyst;
    this._x60382b076ae7366a = this._x554f16462d8d4675.LagDepth;
    this._xdb45501d49a9da70 = this._x554f16462d8d4675.LeadDepth;
    this._x7ab03f26e84400e9 = (this._x60382b076ae7366a + this._xdb45501d49a9da70) + 1;
    this._x7e648b416c264559 = includeOutput
        ? this._x554f16462d8d4675.DetermineTotalColumns()
        : this._x554f16462d8d4675.DetermineTotalInputFieldCount();
    this._x1a429c90ca24e96e = this._x554f16462d8d4675.DetermineInputCount()
        + this._x554f16462d8d4675.DetermineOutputCount();

    // Map each heading (upper-cased) to its ordinal position.
    int index = 0;
    foreach (string heading in headings)
    {
        this._x3f320d9b568c80aa[heading.ToUpper()] = index++;
    }
}
/// <summary>
/// Evaluates the trained classification network against the evaluation set
/// created by Segregate and normalized by Normalization, printing each
/// prediction and the overall hit percentage. (XML doc translated from Portuguese.)
/// </summary>
private static void Evaluate()
{
    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkClassificationFile);
    var analyst = new EncogAnalyst();
    analyst.Load(Config.AnalystClassificationFile.ToString());
    var evaluationSet = EncogUtility.LoadCSV2Memory(Config.NormalizedEvaluateClassificationFile.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);

    int count = 0;
    int CorrectCount = 0;
    foreach (var item in evaluationSet)
    {
        count++;
        var output = network.Compute(item.Input);

        // De-normalize each input with its own field's statistics for display.
        var sepal_l = analyst.Script.Normalize.NormalizedFields[0].DeNormalize(item.Input[0]);
        var sepal_w = analyst.Script.Normalize.NormalizedFields[1].DeNormalize(item.Input[1]);
        var petal_l = analyst.Script.Normalize.NormalizedFields[2].DeNormalize(item.Input[2]);
        var petal_w = analyst.Script.Normalize.NormalizedFields[3].DeNormalize(item.Input[3]);

        // Field 4 is the class field; decode the equilateral encoding back to
        // class indices for both the prediction and the ideal value.
        int classCount = analyst.Script.Normalize.NormalizedFields[4].Classes.Count;
        double normalizationHigh = analyst.Script.Normalize.NormalizedFields[4].NormalizedHigh;
        double normalizationLow = analyst.Script.Normalize.NormalizedFields[4].NormalizedLow;
        var eq = new Encog.MathUtil.Equilateral(classCount, normalizationHigh, normalizationLow);

        var predictedClassInt = eq.Decode(output);
        var predictedClass = analyst.Script.Normalize.NormalizedFields[4].Classes[predictedClassInt].Name;
        var idealClassInt = eq.Decode(item.Ideal);
        var idealClass = analyst.Script.Normalize.NormalizedFields[4].Classes[idealClassInt].Name;

        if (predictedClassInt == idealClassInt)
        {
            CorrectCount++;
        }
        Console.WriteLine("Count :{0} Properties [{1},{2},{3},{4}] ,Ideal : {5} Predicted : {6} ", count, sepal_l, sepal_w, petal_l, petal_w, idealClass, predictedClass);
    }

    Console.WriteLine("Quantidade de itens: {0}", count);
    // Fix: this format string contained a raw line break inside the literal,
    // which is not valid C#; the literal has been rejoined onto one line.
    Console.WriteLine("Quantidade de acertos: {0}", CorrectCount);
    Console.WriteLine("Porcentagem de acertos: {0}", ((CorrectCount * 100.0) / count));
}
/// <summary>
/// Evaluates the trained iris network against the normalized evaluation file,
/// printing each predicted/ideal class pair and the overall success rate.
/// </summary>
public static void Evaluate(FileOps fileOps)
{
    var network = (BasicNetwork) EncogDirectoryPersistence.LoadObject(fileOps.TrainedNeuralNetworkFile);
    var analyst = new EncogAnalyst();
    analyst.Load(fileOps.AnalystFile);

    var evaluationSet = EncogUtility.LoadCSV2Memory(fileOps.NormalisedEvaluationFile.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);

    var iteration = 0;
    var hitCount = 0;
    foreach (var evaluation in evaluationSet)
    {
        iteration++;
        var output = network.Compute(evaluation.Input);

        // Fix: removed four dead locals (sepalL/sepalW/petalL/petalW) that were
        // never printed — and which all de-normalized with NormalizedFields[0]'s
        // statistics instead of each input's own field, so their values were
        // wrong anyway.

        // Field 4 is the class field; decode the equilateral encoding back to
        // a class index for both the prediction and the ideal value.
        var classCount = analyst.Script.Normalize.NormalizedFields[4].Classes.Count;
        var normalisationHigh = analyst.Script.Normalize.NormalizedFields[4].NormalizedHigh;
        var normalisationLow = analyst.Script.Normalize.NormalizedFields[4].NormalizedLow;
        var eq = new Equilateral(classCount, normalisationHigh, normalisationLow);

        var predictedClassInt = eq.Decode(output);
        var predictedClass = analyst.Script.Normalize.NormalizedFields[4].Classes[predictedClassInt].Name;
        var idealClassInt = eq.Decode(evaluation.Ideal);
        var idealClass = analyst.Script.Normalize.NormalizedFields[4].Classes[idealClassInt].Name;

        Console.WriteLine("Predicted: {0} Ideal: {1}",predictedClass,idealClass);
        if (predictedClass == idealClass)
        {
            hitCount++;
        }
    }

    Console.WriteLine("Total Test Count:{0}",iteration);
    Console.WriteLine("Total Correct Predictions: {0}",hitCount);
    Console.WriteLine("Success rate: {0}%", (((float)hitCount/(float)iteration)*100f));
    Console.ReadKey();
}
/// <summary>
/// Generate based on the provided Encog Analyst.
/// </summary>
/// <param name="theAnalyst">The Encog analyst to base this on.</param>
public void Generate(EncogAnalyst theAnalyst)
{
    string template = ResourceLoader.LoadString(TemplatePath);
    using (var reader = new StringReader(template))
    {
        for (string line = reader.ReadLine(); line != null; line = reader.ReadLine())
        {
            // Lines starting with "~~" are template directives; everything
            // else is copied through verbatim with '\n' line endings.
            if (line.StartsWith("~~"))
            {
                ProcessToken(line.Substring(2).Trim());
            }
            else
            {
                contents.Append(line);
                contents.Append("\n");
            }
        }
    }
}
/// <summary>
/// Downloads the iris data set, runs the analyst wizard and the full task
/// pipeline, then writes an HTML report into the given directory.
/// </summary>
public void IrisExample(FileInfo dir)
{
    Console.WriteLine("Starting Iris dataset example.");

    var source = new Uri(IRIS_SOURCE);
    var analystFile = FileUtil.CombinePath(dir, "iris.ega");
    var rawFile = FileUtil.CombinePath(dir, "iris_raw.csv");

    var analyst = new EncogAnalyst();
    analyst.AddAnalystListener(new ConsoleAnalystListener());

    var wizard = new AnalystWizard(analyst);
    //wiz.TaskRandomize = false;
    wizard.Wizard(source, analystFile, rawFile, false, AnalystFileFormat.DecpntComma);
    analyst.Save(analystFile);
    analyst.ExecuteTask("task-full");

    var report = new AnalystReport(analyst);
    report.ProduceReport(FileUtil.CombinePath(dir, "report.html"));
}
/// <summary>
/// Fills every column needed by the field with a single constant value.
/// (Decompiled code: the goto-based loop has been flattened into a for loop;
/// behavior is preserved.)
/// </summary>
public double[] HandleMissing(EncogAnalyst analyst, AnalystField stat)
{
    double[] result = new double[stat.ColumnsNeeded];
    // NOTE(review): as decompiled this expression simplifies to
    // NormalizedLow / 2.0. Upstream Encog computes the range midpoint,
    // NormalizedHigh - (NormalizedHigh - NormalizedLow) / 2.0 — the
    // parenthesization here may be a decompilation artifact. Behavior is
    // kept exactly as decompiled; confirm against the original source
    // before changing it.
    double fill = stat.NormalizedHigh - (stat.NormalizedHigh - (stat.NormalizedLow / 2.0));
    for (int i = 0; i < result.Length; i++)
    {
        result[i] = fill;
    }
    return result;
}
/// <summary>
/// Downloads the forest-cover data set, runs the analyst wizard (with class
/// balancing) and the full task pipeline, then writes an HTML report.
/// </summary>
public void ForestExample(FileInfo dir)
{
    Console.WriteLine("Starting forest cover dataset example.");

    var source = new Uri(FOREST_SOURCE);
    var analystFile = FileUtil.CombinePath(dir, "forest.ega");
    var rawFile = FileUtil.CombinePath(dir, "forest_raw.csv");

    var analyst = new EncogAnalyst();
    analyst.AddAnalystListener(new ConsoleAnalystListener());

    var wizard = new AnalystWizard(analyst);
    wizard.TaskBalance = true;
    wizard.Wizard(source, analystFile, rawFile, false, AnalystFileFormat.DecpntComma);
    analyst.ExecuteTask("task-full");
    analyst.Save(analystFile);

    var report = new AnalystReport(analyst);
    report.ProduceReport(FileUtil.CombinePath(dir, "report.html"));
}
/// <summary>
/// Program entry point.
/// Runs the analyst wizard against the source CSV (no headers, target
/// "field:1"), marks the customer-id field pass-through, normalizes to the
/// target CSV, and saves the analyst script.
/// </summary>
/// <param name="app">Holds arguments and other info.</param>
public void Execute(IExampleInterface app)
{
    Console.WriteLine("Running wizard...");
    var analyst = new EncogAnalyst();

    var wizard = new AnalystWizard(analyst);
    wizard.TargetFieldName = "field:1";
    wizard.Wizard(sourceCSV, false, AnalystFileFormat.DecpntComma);

    // customer id
    analyst.Script.Normalize.NormalizedFields[0].Action = Encog.Util.Arrayutil.NormalizationAction.PassThrough;

    var norm = new AnalystNormalizeCSV();
    norm.Report = new ConsoleStatusReportable();
    Console.WriteLine("Analyze for normalize...");
    norm.Analyze(sourceCSV, false, CSVFormat.English, analyst);
    norm.ProduceOutputHeaders = true;
    Console.WriteLine("Normalize...");
    norm.Normalize(targetCSV);

    analyst.Save(scriptEGA);
}
/// <summary>
/// Generate the code from Encog Analyst.
/// For MQL4/NinjaScript targets the data must be embedded; program-style
/// generators receive the resolved method and training files, while template
/// generators are handed the analyst directly.
/// </summary>
/// <param name="analyst">The Encog Analyst object to use for code generation.</param>
public void Generate(EncogAnalyst analyst)
{
    bool embeddedTarget = targetLanguage == TargetLanguage.MQL4
        || targetLanguage == TargetLanguage.NinjaScript;
    if (embeddedTarget && !EmbedData)
    {
        throw new AnalystCodeGenerationError(
            "MQL4 and Ninjascript must be embedded.");
    }

    if (generator is IProgramGenerator)
    {
        // Resolve the persisted method/training file paths from the script.
        String methodID =
            analyst.Script.Properties.GetPropertyString(ScriptProperties.MlConfigMachineLearningFile);
        String trainingID =
            analyst.Script.Properties.GetPropertyString(ScriptProperties.MlConfigTrainingFile);

        FileInfo methodFile = analyst.Script.ResolveFilename(methodID);
        FileInfo trainingFile = analyst.Script.ResolveFilename(trainingID);

        Generate(methodFile, trainingFile);
    }
    else
    {
        ((ITemplateGenerator) generator).Generate(analyst);
    }
}
/// <summary>
/// Stores the supplied analyst for later use by the report bridge.
/// (Decompiled code: the field name is compiler-obfuscated.)
/// </summary>
/// <param name="theAnalyst">The analyst this bridge wraps.</param>
public AnalystReportBridge(EncogAnalyst theAnalyst) { this._x554f16462d8d4675 = theAnalyst; }
/// <summary>
/// Missing-value handler that performs no substitution and returns null —
/// presumably signalling callers that the row cannot be repaired and should
/// be discarded; confirm against the calling code before relying on this.
/// </summary>
/// <param name="analyst">The analyst (unused).</param>
/// <param name="stat">The field with the missing value (unused).</param>
/// <returns>Always null.</returns>
public double[] HandleMissing(EncogAnalyst analyst, AnalystField stat) { return null; }
// NOTE(review): decompiled, heavily obfuscated two-pass CSV analysis. From the
// calls that are visible: pass 1 reads every row and feeds each column to
// AnalyzedField.Analyze1/CompletePass1; pass 2 re-opens the CSV and feeds
// Analyze2/CompletePass2; the "SETUP:CONFIG_allowedClasses" property then
// appears to gate which field kinds ("int"/"real"/"string") may remain class
// fields; finally field names/class members are reconciled with any existing
// target.Script.Fields and the finalized DataField array is stored on the
// target script. The goto graph is too entangled to restructure safely —
// left byte-identical; recover the original source before modifying.
public void Process(EncogAnalyst target) { string text1; int num; int num2; string str; bool flag; bool flag2; bool flag3; AnalyzedField field3; int num3; IList<AnalystClassItem> analyzedClassMembers; IList<AnalystClassItem> classMembers; int num4; DataField[] fieldArray; int num5; AnalyzedField[] fieldArray2; int num6; AnalyzedField[] fieldArray3; int num7; AnalyzedField[] fieldArray4; int num8; CSVFormat format = ConvertStringConst.ConvertToCSVFormat(this._x5786461d089b10a0); ReadCSV dcsv = new ReadCSV(this._xb41a802ca5fde63b, this._x94e6ca5ac178dbd0, format); Label_0676: if (dcsv.Next()) { if (this._xa942970cc8a85fd4 == null) { this.xd2a854890d89a856(dcsv); } num = 0; while (num < dcsv.ColumnCount) { if (this._xa942970cc8a85fd4 != null) { this._xa942970cc8a85fd4[num].Analyze1(dcsv.Get(num)); } num++; } if (((uint) num2) >= 0) { goto Label_0676; } } else if (this._xa942970cc8a85fd4 != null) { fieldArray2 = this._xa942970cc8a85fd4; } else { if ((((uint) num8) & 0) == 0) { goto Label_05F5; } goto Label_05D0; } if ((((uint) num2) - ((uint) flag2)) >= 0) { for (num6 = 0; num6 < fieldArray2.Length; num6++) { fieldArray2[num6].CompletePass1(); } goto Label_05F5; } goto Label_05D0; Label_0011: num5++; if (((uint) num2) < 0) { goto Label_0251; } Label_002C: if (num5 < fieldArray.Length) { fieldArray[num5] = this._xa942970cc8a85fd4[num5].FinalizeField(); if ((((uint) num6) + ((uint) num)) <= uint.MaxValue) { goto Label_0011; } if ((((uint) num8) | 3) != 0) { goto Label_00E8; } } else { if (((uint) flag2) > uint.MaxValue) { goto Label_0336; } target.Script.Fields = fieldArray; return; } Label_00A6: if (this._xa942970cc8a85fd4.Length == target.Script.Fields.Length) { num3 = 0; goto Label_00EE; } if ((((uint) flag3) & 0) != 0) { goto Label_0248; } Label_00D7: fieldArray = new DataField[this._xa942970cc8a85fd4.Length]; if ((((uint) num6) + ((uint) num4)) >= 0) { num5 = 0; goto Label_002C; } goto Label_0011; Label_00E8: num3++; Label_00EE: if (num3 < 
this._xa942970cc8a85fd4.Length) { this._xa942970cc8a85fd4[num3].Name = target.Script.Fields[num3].Name; if (!this._xa942970cc8a85fd4[num3].Class) { goto Label_00E8; } analyzedClassMembers = this._xa942970cc8a85fd4[num3].AnalyzedClassMembers; classMembers = target.Script.Fields[num3].ClassMembers; if (classMembers.Count != analyzedClassMembers.Count) { goto Label_00E8; } num4 = 0; if (((uint) num2) > uint.MaxValue) { goto Label_0341; } goto Label_0195; } goto Label_00D7; Label_018F: num4++; Label_0195: if (num4 < classMembers.Count) { if (analyzedClassMembers[num4].Code.Equals(classMembers[num4].Code)) { analyzedClassMembers[num4].Name = classMembers[num4].Name; } goto Label_018F; } goto Label_00E8; Label_0238: if (num8 < fieldArray4.Length) { field3 = fieldArray4[num8]; if ((((uint) num) & 0) != 0) { goto Label_02FF; } if (field3.Class) { if (flag) { goto Label_0350; } if (((uint) num4) <= uint.MaxValue) { goto Label_03E6; } goto Label_040F; } goto Label_0251; } if (target.Script.Fields != null) { goto Label_00A6; } if ((((uint) num8) + ((uint) num5)) > uint.MaxValue) { goto Label_0341; } goto Label_00D7; Label_0248: if (field3.Integer && (field3.AnalyzedClassMembers.Count <= 2)) { if ((((uint) num6) - ((uint) num5)) >= 0) { if ((((uint) num7) | 4) == 0) { goto Label_059B; } field3.Class = false; } else { if ((((uint) flag2) + ((uint) num8)) >= 0) { goto Label_0350; } goto Label_02FF; } } Label_0251: num8++; goto Label_0238; Label_02FF: if (!flag2 && (field3.Real && !field3.Integer)) { field3.Class = false; } goto Label_0248; Label_030B: if ((((uint) flag3) - ((uint) num2)) >= 0) { goto Label_02FF; } Label_0341: while (!field3.Real) { field3.Class = false; goto Label_02FF; Label_0336: if (field3.Integer) { goto Label_030B; } } goto Label_0368; Label_0350: if (flag3) { goto Label_02FF; } if ((((uint) num5) & 0) == 0) { goto Label_0336; } Label_0368: if ((((uint) flag) - ((uint) num4)) >= 0) { goto Label_02FF; } goto Label_030B; Label_03E6: if (!field3.Integer) { 
goto Label_0350; } Label_040F: field3.Class = false; if ((((uint) num7) + ((uint) flag)) < 0) { goto Label_05F5; } if ((((uint) num3) + ((uint) flag)) > uint.MaxValue) { goto Label_04D6; } if ((((uint) num3) & 0) == 0) { goto Label_0350; } goto Label_03E6; Label_04A8: dcsv.Close(); Label_04AE: text1 = this._x594135906c55045c.Properties.GetPropertyString("SETUP:CONFIG_allowedClasses"); if (text1 != null) { str = text1; } else { if (0 != 0) { goto Label_02FF; } str = ""; } flag = str.Contains("int"); if (str.Contains("real")) { } flag2 = true; flag3 = str.Contains("string"); fieldArray4 = this._xa942970cc8a85fd4; num8 = 0; goto Label_0238; Label_04D6: if (num7 >= fieldArray3.Length) { } AnalyzedField field2 = fieldArray3[num7]; if (((uint) num4) < 0) { goto Label_04AE; } field2.CompletePass2(); num7++; if ((((uint) num4) + ((uint) num2)) < 0) { goto Label_018F; } if ((((uint) num3) + ((uint) flag)) >= 0) { goto Label_04D6; } Label_0554: if (((uint) flag) > uint.MaxValue) { goto Label_059B; } goto Label_04D6; if ((((uint) num2) | 0x7fffffff) != 0) { if ((((uint) flag2) - ((uint) num3)) >= 0) { goto Label_04A8; } goto Label_05D0; } Label_058B: num2++; Label_0591: if (num2 < dcsv.ColumnCount) { goto Label_05D0; } Label_059B: if (dcsv.Next()) { num2 = 0; goto Label_0591; } if (this._xa942970cc8a85fd4 != null) { fieldArray3 = this._xa942970cc8a85fd4; num7 = 0; goto Label_0554; } goto Label_04A8; Label_05D0: if (this._xa942970cc8a85fd4 != null) { this._xa942970cc8a85fd4[num2].Analyze2(dcsv.Get(num2)); } goto Label_058B; Label_05F5: dcsv.Close(); if (((uint) num) >= 0) { } dcsv = new ReadCSV(this._xb41a802ca5fde63b, this._x94e6ca5ac178dbd0, format); goto Label_059B; }
/// <summary>
/// Creates the randomize command; analyst storage is delegated entirely to the base class.
/// </summary>
/// <param name="analyst">The analyst this command operates on.</param>
public CmdRandomize(EncogAnalyst analyst) : base(analyst) { }