/// <summary>
/// Builds a normalization that maps the two columns of ARRAY_2D into the
/// range 0.1..0.9, writing into a 6x2 buffer, and keeps only the rows
/// selected by an index-range segregator (indexes 0 through 3).
/// </summary>
/// <returns>The configured, unprocessed normalization object.</returns>
public DataNormalization CreateIndexSegregate()
{
    double[][] outputBuffer = EngineArray.AllocateDouble2D(6, 2);

    var norm = new DataNormalization
    {
        Report = new NullStatusReportable(),
        Storage = new NormalizationStorageArray2D(outputBuffer)
    };

    var fieldA = new InputFieldArray2D(false, ARRAY_2D, 0);
    var fieldB = new InputFieldArray2D(false, ARRAY_2D, 1);
    norm.AddInputField(fieldA);
    norm.AddInputField(fieldB);

    norm.AddOutputField(new OutputFieldRangeMapped(fieldA, 0.1, 0.9));
    norm.AddOutputField(new OutputFieldRangeMapped(fieldB, 0.1, 0.9));

    norm.AddSegregator(new IndexRangeSegregator(0, 3));
    return norm;
}
/// <summary>
/// Builds a normalization that maps the two columns of ARRAY_2D into the
/// range 0.1..0.9, writing into a 6x2 buffer, and samples rows with an
/// index-sample segregator (start 0, stop 3, step 2).
/// </summary>
/// <returns>The configured, unprocessed normalization object.</returns>
private DataNormalization CreateSampleSegregate()
{
    IInputField a, b;
    double[][] arrayOutput = EngineArray.AllocateDouble2D(6, 2);
    var target = new NormalizationStorageArray2D(arrayOutput);
    var norm = new DataNormalization();
    norm.Report = new NullStatusReportable();
    norm.Storage = target;
    norm.AddInputField(a = new InputFieldArray2D(false, ARRAY_2D, 0));
    norm.AddInputField(b = new InputFieldArray2D(false, ARRAY_2D, 1));
    norm.AddOutputField(new OutputFieldRangeMapped(a, 0.1, 0.9));
    norm.AddOutputField(new OutputFieldRangeMapped(b, 0.1, 0.9));
    norm.AddSegregator(new IndexSampleSegregator(0, 3, 2));
    // Consistent with the sibling Create* factories: plain return, not "return(norm);".
    return norm;
}
/// <summary>
/// Copies a slice of the source CSV to the target CSV, passing every field
/// through unchanged (OutputFieldDirect). The index-sample segregator selects
/// only the part we actually want, i.e. the training or the evaluation rows.
/// </summary>
/// <param name="source">CSV file to read from (first row treated as headers).</param>
/// <param name="target">CSV file the selected rows are written to.</param>
/// <param name="start">Start index within each sample window.</param>
/// <param name="stop">Stop index within each sample window.</param>
/// <param name="size">Size of the sample window.</param>
public void Copy(FileInfo source, FileInfo target, int start, int stop, int size)
{
    // Column count of the input CSV; presumably fixed by the data file —
    // keep in sync with Narrow(), which uses the same count.
    const int FieldCount = 55;

    var inputField = new IInputField[FieldCount];
    var norm = new DataNormalization
    {
        Report = this,
        Storage = new NormalizationStorageCSV(target.ToString())
    };

    for (int i = 0; i < FieldCount; i++)
    {
        inputField[i] = new InputFieldCSV(true, source.ToString(), i);
        norm.AddInputField(inputField[i]);
        norm.AddOutputField(new OutputFieldDirect(inputField[i]));
    }

    // Load only the part we actually want, i.e. training or eval.
    norm.AddSegregator(new IndexSampleSegregator(start, stop, size));
    norm.Process();
}
/// <summary>
/// Builds a normalization that maps the two columns of ARRAY_2D into the
/// range 0.1..0.9 and excludes rows where field "a" equals 2 (segregator
/// created with include=false, then given the range 2..2).
/// </summary>
/// <returns>The configured, unprocessed normalization object.</returns>
private DataNormalization CreateRangeSegregate()
{
    IInputField a, b;
    // NOTE(review): buffer holds a single output row — assumes exactly one
    // row survives the range segregator; confirm against ARRAY_2D's data.
    double[][] arrayOutput = EngineArray.AllocateDouble2D(1, 2);
    RangeSegregator s;
    var target = new NormalizationStorageArray2D(arrayOutput);
    var norm = new DataNormalization();
    norm.Report = new NullStatusReportable();
    norm.Storage = target;
    norm.AddInputField(a = new InputFieldArray2D(false, ARRAY_2D, 0));
    norm.AddInputField(b = new InputFieldArray2D(false, ARRAY_2D, 1));
    norm.AddOutputField(new OutputFieldRangeMapped(a, 0.1, 0.9));
    norm.AddOutputField(new OutputFieldRangeMapped(b, 0.1, 0.9));
    norm.AddSegregator(s = new RangeSegregator(a, false));
    s.AddRange(2, 2, true);
    // Consistent with the sibling Create* factories: plain return, not "return(norm);".
    return norm;
}
/// <summary>
/// Writes a class-balanced subset of the source CSV to the target CSV:
/// every field passes through unchanged, but an integer-balance segregator
/// caps the number of rows per distinct value of the given field. The
/// per-class counts are printed to the console afterwards.
/// </summary>
/// <param name="source">CSV file to read from (first row treated as headers).</param>
/// <param name="target">CSV file the balanced rows are written to.</param>
/// <param name="field">Index of the class/label column to balance on.</param>
/// <param name="count">Maximum number of rows to keep per class value.</param>
public void Narrow(FileInfo source, FileInfo target, int field, int count)
{
    // Column count of the input CSV; presumably fixed by the data file —
    // keep in sync with Copy(), which uses the same count.
    const int FieldCount = 55;

    var inputField = new IInputField[FieldCount];
    var norm = new DataNormalization
    {
        Report = this,
        Storage = new NormalizationStorageCSV(target.ToString())
    };

    for (int i = 0; i < FieldCount; i++)
    {
        inputField[i] = new InputFieldCSV(true, source.ToString(), i);
        norm.AddInputField(inputField[i]);
        norm.AddOutputField(new OutputFieldDirect(inputField[i]));
    }

    var segregator = new IntegerBalanceSegregator(inputField[field], count);
    norm.AddSegregator(segregator);
    norm.Process();

    Console.WriteLine(@"Samples per tree type:");
    Console.WriteLine(segregator.DumpCounts());
}
/// <summary>
/// Builds a normalization that maps the two columns of ARRAY_2D into the
/// range 0.1..0.9, writing into a 1x2 buffer, and excludes rows where the
/// first field equals 2 (segregator created with include=false, then given
/// the range 2..2 as an exception).
/// </summary>
/// <returns>The configured, unprocessed normalization object.</returns>
private DataNormalization CreateRangeSegregate()
{
    double[][] outputBuffer = EngineArray.AllocateDouble2D(1, 2);

    var norm = new DataNormalization
    {
        Report = new NullStatusReportable(),
        Storage = new NormalizationStorageArray2D(outputBuffer)
    };

    var fieldA = new InputFieldArray2D(false, ARRAY_2D, 0);
    var fieldB = new InputFieldArray2D(false, ARRAY_2D, 1);
    norm.AddInputField(fieldA);
    norm.AddInputField(fieldB);

    norm.AddOutputField(new OutputFieldRangeMapped(fieldA, 0.1, 0.9));
    norm.AddOutputField(new OutputFieldRangeMapped(fieldB, 0.1, 0.9));

    var segregator = new RangeSegregator(fieldA, false);
    segregator.AddRange(2, 2, true);
    norm.AddSegregator(segregator);

    return norm;
}