/// <summary>
/// Adds an equilateral output field covering the 7 cover-type classes (1..7)
/// to the given normalization, registered as the ideal (supervised) output.
/// </summary>
/// <param name="norm">The normalization to receive the output field.</param>
/// <param name="coverType">The input field holding the cover-type class.</param>
public void BuildOutputEquilateral(DataNormalization norm, IInputField coverType)
{
    var outType = new OutputEquilateral();

    // Cover types are the classes 1..7 in the raw data; a loop replaces the
    // previous seven hand-written AddItem calls.
    for (int coverClass = 1; coverClass <= 7; coverClass++)
    {
        outType.AddItem(coverType, coverClass);
    }

    norm.AddOutputField(outType, true);
}
/// <summary>
/// Populates the view model with the available years and the function
/// distribution for the currently selected year, then renders the view.
/// </summary>
/// <param name="distribuicaoFuncoesVM">Incoming view model carrying the selected year.</param>
/// <returns>The DistribuicaoFuncoes view bound to the filled view model.</returns>
public IActionResult DistribuicaoFuncoes(DistribuicaoFuncoesVM distribuicaoFuncoesVM)
{
    // Reuse a single BO instance instead of constructing one per query.
    var bo = new FiliadosFuncionariosBO(_connectionStrings.DefaultConnection);

    List<int> listaAnos = bo.GetAllDataCargosDisponiveis();
    distribuicaoFuncoesVM.selecaoAno = new SelectList(listaAnos);

    List<FiliadosFuncionariosDTO> listaRegioesFuncoes = bo.GetDistribuicaoFuncoes(
        new FiliadosFuncionariosDTO() { Ano = distribuicaoFuncoesVM.anoSelecionado });

    distribuicaoFuncoesVM.arrayDados = DataNormalization.NormalizeDistribuicaoFuncoes(listaRegioesFuncoes);
    return View(distribuicaoFuncoesVM);
}
/// <summary>
/// Returns the normalized function distribution for the given year as JSON,
/// or a JSON null when no rows match.
/// </summary>
/// <param name="ano">The year to filter by.</param>
public JsonResult GetDistribuicaoFuncoes(int ano)
{
    var filtro = new FiliadosFuncionariosDTO() { Ano = ano };
    List<FiliadosFuncionariosDTO> resultado =
        new FiliadosFuncionariosBO(_connectionStrings.DefaultConnection)
            .GetDistribuicaoFuncoes(filtro);

    // Empty result set -> JSON null, matching the original contract.
    if (resultado.Count == 0)
    {
        return Json(null);
    }

    return Json(DataNormalization.NormalizeDistribuicaoFuncoes(resultado));
}
/// <summary>
/// Loads the trained network and normalization, evaluates every row of the
/// evaluation CSV, and prints overall plus per-tree-type accuracy.
/// </summary>
public void EvaluateNetwork()
{
    BasicNetwork network = LoadNetwork();
    DataNormalization norm = LoadNormalization();
    var csv = new ReadCSV(_config.EvaluateFile.ToString(), false, ',');
    var input = new double[norm.InputFields.Count];
    var eqField = (OutputEquilateral)norm.FindOutputField(
        typeof(OutputEquilateral), 0);

    int correct = 0;
    int total = 0;

    while (csv.Next())
    {
        total++;
        for (int i = 0; i < input.Length; i++)
        {
            input[i] = csv.GetDouble(i);
        }
        IMLData inputData = norm.BuildForNetworkInput(input);
        IMLData output = network.Compute(inputData);
        int coverTypeActual = DetermineTreeType(eqField, output);
        // Ideal class lives in column 54 and is 1-based in the raw data.
        int coverTypeIdeal = (int)csv.GetDouble(54) - 1;
        KeepScore(coverTypeActual, coverTypeIdeal);
        if (coverTypeActual == coverTypeIdeal)
        {
            correct++;
        }
    }

    Console.WriteLine(@"Total cases:" + total);
    Console.WriteLine(@"Correct cases:" + correct);
    // Guard: an empty evaluation file would otherwise print NaN (0/0).
    double percent = total == 0 ? 0.0 : correct / (double)total;
    Console.WriteLine(@"Correct percent:" + Format.FormatPercentWhole(percent));
    for (int i = 0; i < 7; i++)
    {
        // Guard: a tree type with no samples would otherwise divide by zero.
        double p = _treeCount[i] == 0
            ? 0.0
            : _treeCorrect[i] / (double)_treeCount[i];
        Console.WriteLine(@"Tree Type #" + i + @" - Correct/total: "
            + _treeCorrect[i] + @"/" + _treeCount[i]
            + @"(" + Format.FormatPercentWhole(p) + @")");
    }
}
/// <summary>
/// Loads a serialized normalization from the specified directory and file.
/// </summary>
/// <param name="directory">The directory containing the normalization file.</param>
/// <param name="file">The file name.</param>
/// <returns>The deserialized DataNormalization, or null if not found.</returns>
public static DataNormalization LoadNormalization(string directory, string file)
{
    FileInfo normFile = FileUtil.CombinePath(new FileInfo(directory), file);

    DataNormalization result = null;
    if (normFile.Exists)
    {
        result = (DataNormalization)SerializeObject.Load(normFile.FullName);
    }

    if (result != null)
    {
        return result;
    }

    Console.WriteLine(@"Can't find normalization resource: " + directory + file);
    return null;
}
/// <summary>
/// Loads the serialized normalization referenced by the configuration.
/// </summary>
/// <returns>The deserialized DataNormalization, or null if the file is missing.</returns>
public DataNormalization LoadNormalization()
{
    DataNormalization result = null;

    if (_config.NormalizeFile.Exists)
    {
        result = (DataNormalization)SerializeObject.Load(_config.NormalizeFile.ToString());
    }

    if (result != null)
    {
        return result;
    }

    Console.WriteLine(@"Can't find normalization resource: " + _config.NormalizeFile);
    return null;
}
/// <summary>
/// Runs the three data-generation steps, persists the resulting
/// normalization, then creates and persists a feed-forward network sized
/// from that normalization.
/// </summary>
/// <param name="config">Forest demo configuration (file locations, layer sizes).</param>
/// <param name="useOneOf">True for one-of-n output encoding; false for equilateral.</param>
public static void Generate(ForestConfig config, bool useOneOf)
{
    var generator = new GenerateData(config);
    generator.Step1();
    generator.Step2();
    DataNormalization norm = generator.Step3(useOneOf);

    // save the normalize object
    SerializeObject.Save(config.NormalizeFile.ToString(), norm);

    // create and save the neural network
    BasicNetwork network = EncogUtility.SimpleFeedForward(
        norm.GetNetworkInputLayerSize(),
        config.HiddenCount,
        0,
        norm.GetNetworkOutputLayerSize(),
        true);
    EncogDirectoryPersistence.SaveObject(config.TrainedNetworkFile, network);
}
/// <summary>
/// Copies a segment of the 55-column source CSV into the target CSV,
/// passing every column through unchanged (direct output fields).
/// </summary>
/// <param name="source">Source CSV file.</param>
/// <param name="target">Target CSV file.</param>
/// <param name="start">Segment start index for the sampler.</param>
/// <param name="stop">Segment stop index for the sampler.</param>
/// <param name="size">Sample cycle size.</param>
public void Copy(FileInfo source, FileInfo target, int start, int stop, int size)
{
    const int fieldCount = 55;
    var fields = new IInputField[fieldCount];
    var norm = new DataNormalization
    {
        Report = this,
        Storage = new NormalizationStorageCSV(target.ToString())
    };

    for (int col = 0; col < fieldCount; col++)
    {
        fields[col] = new InputFieldCSV(true, source.ToString(), col);
        norm.AddInputField(fields[col]);
        norm.AddOutputField(new OutputFieldDirect(fields[col]));
    }

    // load only the part we actually want, i.e. training or eval
    norm.AddSegregator(new IndexSampleSegregator(start, stop, size));
    norm.Process();
}
/// <summary>
/// Balances the 55-column source CSV by capping the number of rows per
/// distinct value of the given field, writing the result to the target CSV
/// and printing the per-value counts.
/// </summary>
/// <param name="source">Source CSV file.</param>
/// <param name="target">Target CSV file.</param>
/// <param name="field">Index of the column to balance on.</param>
/// <param name="count">Maximum rows to keep per distinct value.</param>
public void Narrow(FileInfo source, FileInfo target, int field, int count)
{
    const int fieldCount = 55;
    var fields = new IInputField[fieldCount];
    var norm = new DataNormalization
    {
        Report = this,
        Storage = new NormalizationStorageCSV(target.ToString())
    };

    for (int col = 0; col < fieldCount; col++)
    {
        fields[col] = new InputFieldCSV(true, source.ToString(), col);
        norm.AddInputField(fields[col]);
        norm.AddOutputField(new OutputFieldDirect(fields[col]));
    }

    var balancer = new IntegerBalanceSegregator(fields[field], count);
    norm.AddSegregator(balancer);
    norm.Process();

    Console.WriteLine(@"Samples per tree type:");
    Console.WriteLine(balancer.DumpCounts());
}
/// <summary>
/// Normalizes the four numeric iris columns (range-mapped) into an
/// in-memory data set and prints the resulting record count.
/// </summary>
/// <param name="app">The example host (unused here).</param>
public void Execute(IExampleInterface app)
{
    string inputFile = "C:\\jth\\iris.csv";

    var normalize = new DataNormalization();

    var a = new InputFieldCSV(true, inputFile, "sepal_l");
    var b = new InputFieldCSV(true, inputFile, "sepal_w");
    var c = new InputFieldCSV(true, inputFile, "petal_l");
    var d = new InputFieldCSV(true, inputFile, "petal_w");

    normalize.AddInputField(a);
    normalize.AddInputField(b);
    normalize.AddInputField(c);
    normalize.AddInputField(d);
    // species is read but not used as network input
    normalize.AddInputField(new InputFieldCSV(false, inputFile, "species"));

    normalize.AddOutputField(new OutputFieldRangeMapped(a));
    normalize.AddOutputField(new OutputFieldRangeMapped(b));
    normalize.AddOutputField(new OutputFieldRangeMapped(c));
    normalize.AddOutputField(new OutputFieldRangeMapped(d));
    //normalize.AddOutputField(new OutputOneOf(1,0));

    var store = new NormalizationStorageMLDataSet(4, 0);
    normalize.Storage = store;
    normalize.Report = new ConsoleStatusReportable();
    normalize.Process(true);

    Console.WriteLine(store.DataSet.Count);
}
/// <summary>
/// Normalizes the four numeric iris columns (range-mapped) into an
/// in-memory data set and prints how many records were validated.
/// </summary>
/// <param name="app">The example host (unused here).</param>
public void Execute(IExampleInterface app)
{
    string inputFile = @"C:\Development\AI\machinelearning\encog-dotnet-core-master\encog-core-test\Resources\iris.csv";

    var normalize = new DataNormalization();

    var a = new InputFieldCSV(true, inputFile, "sepal_l");
    var b = new InputFieldCSV(true, inputFile, "sepal_w");
    var c = new InputFieldCSV(true, inputFile, "petal_l");
    var d = new InputFieldCSV(true, inputFile, "petal_w");

    normalize.AddInputField(a);
    normalize.AddInputField(b);
    normalize.AddInputField(c);
    normalize.AddInputField(d);
    // species is read but not used as network input
    normalize.AddInputField(new InputFieldCSV(false, inputFile, "species"));

    normalize.AddOutputField(new OutputFieldRangeMapped(a));
    normalize.AddOutputField(new OutputFieldRangeMapped(b));
    normalize.AddOutputField(new OutputFieldRangeMapped(c));
    normalize.AddOutputField(new OutputFieldRangeMapped(d));
    //normalize.AddOutputField(new OutputOneOf(1,0));

    var store = new NormalizationStorageMLDataSet(4, 0);
    normalize.Storage = store;
    normalize.Report = new ConsoleStatusReportable();
    normalize.Process(true);

    Console.WriteLine(store.DataSet.Count + " Datasets validated");
}
/// <summary>
/// Init the object.
/// </summary>
/// <param name="normalization">The normalization object that owns this range.</param>
public void Init(DataNormalization normalization)
{
    // Keep a back-reference to the owning normalization.
    this.normalization = normalization;
}
/// <summary>
/// Init the object.
/// </summary>
/// <param name="normalization">The normalization object that owns this range.</param>
public void Init(DataNormalization normalization)
{
    // Keep a back-reference to the owning normalization.
    _normalization = normalization;
}
/// <summary>
/// Step 3: builds and runs the normalization over the 55-column balanced
/// data file, writing range-mapped terrain columns, direct soil-type
/// columns, and an encoded cover-type output to the normalized CSV.
/// </summary>
/// <param name="useOneOf">True for one-of-n output encoding; false for equilateral.</param>
/// <returns>The processed DataNormalization.</returns>
public DataNormalization Step3(bool useOneOf)
{
    Console.WriteLine(@"Step 3: Normalize training data");
    // Ten continuous terrain attributes (columns 0-9 of the balanced file).
    IInputField inputElevation;
    IInputField inputAspect;
    IInputField inputSlope;
    IInputField hWater;
    IInputField vWater;
    IInputField roadway;
    IInputField shade9;
    IInputField shade12;
    IInputField shade3;
    IInputField firepoint;
    // Binary indicator columns: 4 wilderness areas (10-13), 40 soil types (14-53).
    var wilderness = new IInputField[4];
    var soilType = new IInputField[40];
    // Class label (column 54); flagged "false" below so it is not network input.
    IInputField coverType;
    var norm = new DataNormalization
    {
        Report = this,
        Storage = new NormalizationStorageCSV(_config.NormalizedDataFile.ToString())
    };
    norm.AddInputField(inputElevation = new InputFieldCSV(true, _config.BalanceFile.ToString(), 0));
    norm.AddInputField(inputAspect = new InputFieldCSV(true, _config.BalanceFile.ToString(), 1));
    norm.AddInputField(inputSlope = new InputFieldCSV(true, _config.BalanceFile.ToString(), 2));
    norm.AddInputField(hWater = new InputFieldCSV(true, _config.BalanceFile.ToString(), 3));
    norm.AddInputField(vWater = new InputFieldCSV(true, _config.BalanceFile.ToString(), 4));
    norm.AddInputField(roadway = new InputFieldCSV(true, _config.BalanceFile.ToString(), 5));
    norm.AddInputField(shade9 = new InputFieldCSV(true, _config.BalanceFile.ToString(), 6));
    norm.AddInputField(shade12 = new InputFieldCSV(true, _config.BalanceFile.ToString(), 7));
    norm.AddInputField(shade3 = new InputFieldCSV(true, _config.BalanceFile.ToString(), 8));
    norm.AddInputField(firepoint = new InputFieldCSV(true, _config.BalanceFile.ToString(), 9));
    for (int i = 0; i < 4; i++)
    {
        norm.AddInputField(wilderness[i] = new InputFieldCSV(true, _config.BalanceFile.ToString(), 10 + i));
    }
    for (int i = 0; i < 40; i++)
    {
        norm.AddInputField(soilType[i] = new InputFieldCSV(true, _config.BalanceFile.ToString(), 14 + i));
    }
    norm.AddInputField(coverType = new InputFieldCSV(false, _config.BalanceFile.ToString(), 54));
    // Continuous attributes are range-mapped; soil types pass through directly.
    norm.AddOutputField(new OutputFieldRangeMapped(inputElevation));
    norm.AddOutputField(new OutputFieldRangeMapped(inputAspect));
    norm.AddOutputField(new OutputFieldRangeMapped(inputSlope));
    norm.AddOutputField(new OutputFieldRangeMapped(hWater));
    norm.AddOutputField(new OutputFieldRangeMapped(vWater));
    norm.AddOutputField(new OutputFieldRangeMapped(roadway));
    norm.AddOutputField(new OutputFieldRangeMapped(shade9));
    norm.AddOutputField(new OutputFieldRangeMapped(shade12));
    norm.AddOutputField(new OutputFieldRangeMapped(shade3));
    norm.AddOutputField(new OutputFieldRangeMapped(firepoint));
    for (int i = 0; i < 40; i++)
    {
        norm.AddOutputField(new OutputFieldDirect(soilType[i]));
    }
    // NOTE(review): the 4 wilderness input fields get no output field here,
    // so they are dropped from the normalized output — confirm this is
    // intentional and matches the network input layer sizing.
    if (useOneOf)
    {
        BuildOutputOneOf(norm, coverType);
    }
    else
    {
        BuildOutputEquilateral(norm, coverType);
    }
    norm.Process();
    return(norm);
}
/// <summary>
/// Saves a normalization to the specified folder with the specified name.
/// </summary>
/// <param name="directory">The directory.</param>
/// <param name="file">The file name.</param>
/// <param name="normTosave">The normalization to save.</param>
public static void SaveNormalization(string directory, string file, DataNormalization normTosave)
{
    // Use Path.Combine so a missing trailing separator on "directory" cannot
    // silently produce a wrong path (the old "directory + file" concatenation
    // could). Also matches the companion loader, which combines paths properly.
    SerializeObject.Save(System.IO.Path.Combine(directory, file), normTosave);
}