/// <summary>
/// Normalizes the iris data set: the four numeric columns are range-mapped
/// and collected into an in-memory ML data set, then the row count is printed.
/// </summary>
/// <param name="app">Example host interface (not used by this demo).</param>
public void Execute(IExampleInterface app)
{
    string inputFile = "C:\\jth\\iris.csv";
    var normalize = new DataNormalization();

    // The four numeric columns feed the normalized output; the species column
    // is registered with the flag set to false (presumably "used for network
    // input" — verify against the InputFieldCSV constructor).
    var sepalLength = new InputFieldCSV(true, inputFile, "sepal_l");
    var sepalWidth = new InputFieldCSV(true, inputFile, "sepal_w");
    var petalLength = new InputFieldCSV(true, inputFile, "petal_l");
    var petalWidth = new InputFieldCSV(true, inputFile, "petal_w");

    normalize.AddInputField(sepalLength);
    normalize.AddInputField(sepalWidth);
    normalize.AddInputField(petalLength);
    normalize.AddInputField(petalWidth);
    normalize.AddInputField(new InputFieldCSV(false, inputFile, "species"));

    // Range-map each numeric field into the output.
    normalize.AddOutputField(new OutputFieldRangeMapped(sepalLength));
    normalize.AddOutputField(new OutputFieldRangeMapped(sepalWidth));
    normalize.AddOutputField(new OutputFieldRangeMapped(petalLength));
    normalize.AddOutputField(new OutputFieldRangeMapped(petalWidth));
    //normalize.AddOutputField(new OutputOneOf(1,0));

    // 4 input columns, 0 ideal (target) columns.
    var store = new NormalizationStorageMLDataSet(4, 0);
    normalize.Storage = store;
    normalize.Report = new ConsoleStatusReportable();
    normalize.Process(true);

    Console.WriteLine(store.DataSet.Count);
}
/// <summary>
/// Normalizes the iris data set from the repository's test resources: the four
/// numeric columns are range-mapped into an in-memory ML data set, and the
/// number of resulting rows is reported on the console.
/// </summary>
/// <param name="app">Example host interface (not used by this demo).</param>
public void Execute(IExampleInterface app)
{
    string inputFile = @"C:\Development\AI\machinelearning\encog-dotnet-core-master\encog-core-test\Resources\iris.csv";
    var normalize = new DataNormalization();

    // The four numeric columns feed the normalized output; the species column
    // is registered with the flag set to false (presumably "used for network
    // input" — verify against the InputFieldCSV constructor).
    var sepalLength = new InputFieldCSV(true, inputFile, "sepal_l");
    var sepalWidth = new InputFieldCSV(true, inputFile, "sepal_w");
    var petalLength = new InputFieldCSV(true, inputFile, "petal_l");
    var petalWidth = new InputFieldCSV(true, inputFile, "petal_w");

    normalize.AddInputField(sepalLength);
    normalize.AddInputField(sepalWidth);
    normalize.AddInputField(petalLength);
    normalize.AddInputField(petalWidth);
    normalize.AddInputField(new InputFieldCSV(false, inputFile, "species"));

    // Range-map each numeric field into the output.
    normalize.AddOutputField(new OutputFieldRangeMapped(sepalLength));
    normalize.AddOutputField(new OutputFieldRangeMapped(sepalWidth));
    normalize.AddOutputField(new OutputFieldRangeMapped(petalLength));
    normalize.AddOutputField(new OutputFieldRangeMapped(petalWidth));
    //normalize.AddOutputField(new OutputOneOf(1,0));

    // 4 input columns, 0 ideal (target) columns.
    var store = new NormalizationStorageMLDataSet(4, 0);
    normalize.Storage = store;
    normalize.Report = new ConsoleStatusReportable();
    normalize.Process(true);

    Console.WriteLine(store.DataSet.Count + " Datasets validated");
}
/// <summary>
/// Copies one segment (e.g. the training or evaluation slice) of the 55-column
/// source CSV to the target CSV. Every column passes through unchanged; an
/// index segregator selects which rows are kept.
/// </summary>
/// <param name="source">CSV file to read from.</param>
/// <param name="target">CSV file to write the selected rows to.</param>
/// <param name="start">Start index of the sample window (passed to the segregator).</param>
/// <param name="stop">End index of the sample window (passed to the segregator).</param>
/// <param name="size">Total sample size (passed to the segregator).</param>
public void Copy(FileInfo source, FileInfo target, int start, int stop, int size)
{
    // Number of columns in the source CSV (was a duplicated magic 55).
    const int FieldCount = 55;

    // Hoisted out of the loop: source.ToString() is loop-invariant.
    string sourcePath = source.ToString();

    var norm = new DataNormalization
    {
        Report = this,
        Storage = new NormalizationStorageCSV(target.ToString())
    };

    // Register each column as both an input and a pass-through output.
    // (The original stored the fields in an array that was never read again,
    // so the array has been removed.)
    for (int i = 0; i < FieldCount; i++)
    {
        var field = new InputFieldCSV(true, sourcePath, i);
        norm.AddInputField(field);
        norm.AddOutputField(new OutputFieldDirect(field));
    }

    // load only the part we actually want, i.e. training or eval
    norm.AddSegregator(new IndexSampleSegregator(start, stop, size));
    norm.Process();
}
/// <summary>
/// Balances the 55-column source CSV on one field so that no distinct integer
/// value of that field occurs more than <paramref name="count"/> times, writing
/// the balanced rows to <paramref name="target"/> and dumping the per-value
/// counts to the console.
/// </summary>
/// <param name="source">CSV file to read from.</param>
/// <param name="target">CSV file to write the balanced rows to.</param>
/// <param name="field">Index of the column to balance on.</param>
/// <param name="count">Maximum number of rows to keep per distinct value.</param>
public void Narrow(FileInfo source, FileInfo target, int field, int count)
{
    // Number of columns in the source CSV (was a duplicated magic 55).
    const int FieldCount = 55;

    // Hoisted out of the loop: source.ToString() is loop-invariant.
    string sourcePath = source.ToString();

    // The array is kept (unlike Copy) because inputField[field] is needed
    // below to build the balance segregator.
    var inputField = new IInputField[FieldCount];
    var norm = new DataNormalization
    {
        Report = this,
        Storage = new NormalizationStorageCSV(target.ToString())
    };

    // Every column passes straight through; the segregator does the filtering.
    for (int i = 0; i < FieldCount; i++)
    {
        inputField[i] = new InputFieldCSV(true, sourcePath, i);
        norm.AddInputField(inputField[i]);
        norm.AddOutputField(new OutputFieldDirect(inputField[i]));
    }

    var segregator = new IntegerBalanceSegregator(inputField[field], count);
    norm.AddSegregator(segregator);
    norm.Process();

    Console.WriteLine(@"Samples per tree type:");
    Console.WriteLine(segregator.DumpCounts());
}
/// <summary>
/// Step 3: normalizes the balanced forest-cover training data and writes it to
/// the normalized-data CSV configured in _config. The ten continuous columns
/// are range-mapped, the soil-type indicator columns pass through directly,
/// and the cover type is encoded either one-of-n or equilateral.
/// </summary>
/// <param name="useOneOf">
/// True to encode the cover type via BuildOutputOneOf; false to use
/// BuildOutputEquilateral.
/// </param>
/// <returns>The DataNormalization object, after Process() has run.</returns>
public DataNormalization Step3(bool useOneOf)
{
    Console.WriteLine(@"Step 3: Normalize training data");

    string balancePath = _config.BalanceFile.ToString();
    var norm = new DataNormalization
    {
        Report = this,
        Storage = new NormalizationStorageCSV(_config.NormalizedDataFile.ToString())
    };

    // Columns 0-9: the ten continuous measurements (elevation, aspect, slope,
    // horizontal/vertical water distance, roadway distance, the three
    // hillshade readings and the fire-point distance), in file order.
    var numeric = new IInputField[10];
    for (int i = 0; i < numeric.Length; i++)
    {
        numeric[i] = new InputFieldCSV(true, balancePath, i);
        norm.AddInputField(numeric[i]);
    }

    // Columns 10-13: wilderness-area indicator columns.
    var wilderness = new IInputField[4];
    for (int i = 0; i < wilderness.Length; i++)
    {
        wilderness[i] = new InputFieldCSV(true, balancePath, 10 + i);
        norm.AddInputField(wilderness[i]);
    }

    // Columns 14-53: soil-type indicator columns.
    var soilType = new IInputField[40];
    for (int i = 0; i < soilType.Length; i++)
    {
        soilType[i] = new InputFieldCSV(true, balancePath, 14 + i);
        norm.AddInputField(soilType[i]);
    }

    // Column 54: the cover type — the value to predict (flag false, matching
    // the non-network-input convention used elsewhere in this file).
    IInputField coverType = new InputFieldCSV(false, balancePath, 54);
    norm.AddInputField(coverType);

    // Range-map each continuous field into the output, in the same order they
    // were registered as inputs.
    foreach (IInputField field in numeric)
    {
        norm.AddOutputField(new OutputFieldRangeMapped(field));
    }

    // Soil-type indicators pass through unchanged.
    // NOTE(review): the four wilderness columns are registered as inputs but
    // have no matching output fields, so they are dropped from the normalized
    // file — confirm this is intentional (it changes the network input width).
    foreach (IInputField field in soilType)
    {
        norm.AddOutputField(new OutputFieldDirect(field));
    }

    // Encode the target column with the requested scheme.
    if (useOneOf)
    {
        BuildOutputOneOf(norm, coverType);
    }
    else
    {
        BuildOutputEquilateral(norm, coverType);
    }

    norm.Process();
    return norm;
}