/// <summary>
/// Create an initial population of randomized RBF networks, all placed in
/// a single species.
/// </summary>
/// <param name="rnd">Random number generator used to randomize each network.</param>
/// <param name="codec">The codec, defining the network topology to use.</param>
/// <returns>The population.</returns>
public static IPopulation InitPopulation(IGenerateRandom rnd, RBFNetworkGenomeCODEC codec)
{
    // Create a throwaway RBF network just to determine the genome length.
    var network = new RBFNetwork(codec.InputCount, codec.RbfCount, codec.OutputCount);
    int size = network.LongTermMemory.Length;

    // Create a new population that uses a single species. The genome factory
    // is supplied here; the original code redundantly re-assigned an
    // identical factory after the loop, which has been removed.
    IPopulation result = new BasicPopulation(PopulationSize, new DoubleArrayGenomeFactory(size));
    var defaultSpecies = new BasicSpecies { Population = result };
    result.Species.Add(defaultSpecies);

    // Fill the species with random networks: randomize the scratch network
    // and copy its long-term memory into a fresh genome each time.
    for (int i = 0; i < PopulationSize; i++)
    {
        var genome = new DoubleArrayGenome(size);
        network.Reset(rnd);
        Array.Copy(network.LongTermMemory, 0, genome.Data, 0, size);
        defaultSpecies.Add(genome);
    }

    return result;
}
/// <summary>
/// Generate the RBF network from the configured neuron counts and RBF type.
/// </summary>
/// <returns>The newly constructed neural network.</returns>
public IMLMethod Generate()
{
    return new RBFNetwork(_inputNeurons, _hiddenNeurons, _outputNeurons, _rbfType);
}
/// <summary>
/// Verify the regression output of a 2-input, 1-RBF, 1-output network
/// with hand-set weights.
/// </summary>
public void TestComputeRegression()
{
    var net = new RBFNetwork(2, 1, 1);

    double[] memory =
    {
        2.0, // input 1 to RBF 1
        2.0, // input 2 to RBF 1
        5.0, // RBF width
        2.0, // RBF, center-0
        4.0, // RBF, center-1
        3.0, // RBF1 to Output 1
        4.0  // Bias to Output 1
    };
    Array.Copy(memory, 0, net.LongTermMemory, 0, memory.Length);

    double[] input = { 1, 2 };
    double actual = net.ComputeRegression(input)[0];

    // Inputs: (2*1) + (2*2) = 6
    // RBF: Gaussian(6) = 1
    // Outputs: (1*3) + (1*4) = 7
    Assert.AreEqual(7, actual, AIFH.DefaultPrecision);
}
/// <summary>
/// Produce in-sample predictions for the simulated series by sliding a
/// normalized window over the data and querying the network at each point.
/// </summary>
/// <param name="network">The trained network.</param>
/// <param name="norm">The normalization used to scale inputs/outputs.</param>
/// <param name="simulatedData">The source series.</param>
/// <returns>A series of denormalized predictions, one per input point.</returns>
public TimeSeries Predict(RBFNetwork network, NormalizeArray norm, TimeSeries simulatedData)
{
    double[] raw = GenerateData(simulatedData);
    int count = simulatedData.Count;
    var result = new TimeSeries();

    for (int t = 0; t < count; ++t)
    {
        // Build the normalized window ending just before position t;
        // positions before the start of the series contribute zero.
        var window = new BasicMLData(WindowSize);
        for (var w = 0; w < WindowSize; w++)
        {
            int src = (t - WindowSize) + w;
            window[w] = src < 0 ? 0 : norm.Stats.Normalize(raw[src]);
        }

        IMLData output = network.Compute(window);
        double prediction = norm.Stats.DeNormalize(output[0]);
        result.Add(simulatedData.TimeStamp(t), prediction, false);
    }

    return result;
}
/// <summary>
/// Train an RBF network on the sine data set and then test it.
/// </summary>
public virtual void run()
{
    // Get the path to the file with training data.
    string inputFileName = "data_sets/sine.csv";

    // Create the RBF neural network: 1 input, 15 RBF neurons, 1 output.
    // (The original comment incorrectly said "MultiLayerPerceptron".)
    RBFNetwork neuralNet = new RBFNetwork(1, 15, 1);

    // Create the training set from file: 1 input column, 1 output column,
    // comma separated, no header row.
    DataSet dataSet = DataSet.createFromFile(inputFileName, 1, 1, ",", false);

    RBFLearning learningRule = ((RBFLearning)neuralNet.LearningRule);
    learningRule.LearningRate = 0.02;
    learningRule.MaxError = 0.01;
    // Register this object as a listener so training progress can be observed.
    learningRule.addListener(this);

    // Train the network with the training set.
    neuralNet.learn(dataSet);

    Console.WriteLine("Done training.");
    Console.WriteLine("Testing network...");
    testNeuralNetwork(neuralNet, dataSet);
}
/// <summary>
/// XOR example using an RBF network with equally spaced centers, solved
/// by SVD training, then queried against the training set.
/// </summary>
/// <param name="args">Not used.</param>
static void Main(string[] args)
{
    int dimension = 2;
    int numNeuronsPerDimension = 4;
    // Fixed: the original line ended with a stray empty statement (";;").
    double volumeNeuronWidth = 2.0 / numNeuronsPerDimension;
    bool includeEdgeRBFs = true;

    RBFNetwork n = new RBFNetwork(dimension, numNeuronsPerDimension, 1, RBFEnum.Gaussian);
    n.SetRBFCentersAndWidthsEqualSpacing(0, 1, RBFEnum.Gaussian, volumeNeuronWidth, includeEdgeRBFs);

    INeuralDataSet trainingSet = new BasicNeuralDataSet(XORInput, XORIdeal);
    SVDTraining train = new SVDTraining(n, trainingSet);

    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
        // NOTE: "epoch < 1" is false after the first pass, so the loop runs
        // exactly once -- SVD training is a single-step solve.
    } while ((epoch < 1) && (train.Error > 0.001));

    Console.WriteLine(@"Neural Network Results:");
    foreach (IMLDataPair pair in trainingSet)
    {
        IMLData output = n.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                          + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
    }

    Console.Read();
}
/// <summary>
/// XOR example: RBF network with equally spaced centers, trained by SVD.
/// </summary>
/// <param name="args">Not used.</param>
static void Main(string[] args)
{
    // XORInput provides two-dimensional inputs (not 8). If XORInput were
    // 8-dimensional, each row would look like:
    //   new[] {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0}, ...
    const int dimension = 2;

    // Could also be 16, 64, 256; accepting 8 or 32 may work but would need
    // additional investigation.
    const int numNeuronsPerDimension = 4;
    const bool includeEdgeRBFs = true;
    double volumeNeuronWidth = 2.0 / numNeuronsPerDimension;

    var network = new RBFNetwork(dimension, numNeuronsPerDimension, 1, RBFEnum.Gaussian);
    network.SetRBFCentersAndWidthsEqualSpacing(0, 1, RBFEnum.Gaussian, volumeNeuronWidth, includeEdgeRBFs);
    //network.RandomizeRBFCentersAndWidths(0, 1, RBFEnum.Gaussian);

    INeuralDataSet trainingSet = new BasicNeuralDataSet(XORInput, XORIdeal);
    var train = new SVDTraining(network, trainingSet);

    // SVD is a single-step solve: "epoch < 1" is false after the first
    // pass, so the body executes exactly once.
    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
        epoch++;
    } while ((epoch < 1) && (train.Error > 0.001));
}
/// <summary>
/// Build the model output: lazily construct and train the network on first
/// use, then return its predictions over the simulated series.
/// </summary>
/// <param name="simulatedData">The source series.</param>
/// <param name="userState">Unused caller state.</param>
/// <returns>The predicted series.</returns>
protected override TimeSeries _BuildOutput(TimeSeries simulatedData, object userState = null)
{
    if (mModel == null)
    {
        // First call: build the network and capture the normalization.
        mModel = BuildNetwork(simulatedData, out mNorm);
    }

    return Predict(mModel, mNorm, simulatedData);
}
/// <summary>
/// Build forecasts for the given future time stamps, lazily constructing
/// the network on first use.
/// </summary>
/// <param name="simulatedData">The historical series.</param>
/// <param name="futureTimes">The time stamps to forecast.</param>
/// <returns>The forecast series.</returns>
public override TimeSeries BuildForecasts(TimeSeries simulatedData, List<DateTime> futureTimes)
{
    if (mModel == null)
    {
        // First call: build the network and capture the normalization.
        mModel = BuildNetwork(simulatedData, out mNorm);
    }

    return Forecast(mModel, mNorm, simulatedData, futureTimes);
}
/// <summary>
/// Verify the memory length and string rendering of a freshly built network.
/// </summary>
public void TestBasics()
{
    var net = new RBFNetwork(2, 1, 1);

    // Expected length is 7: (2 inputs * 1 RBF) + (1 RBF + 1 bias) * 1 output
    // + 3 RBF parameters = 2 + 2 + 3 = 7.
    Assert.AreEqual(7, net.LongTermMemory.Length);

    Assert.AreEqual(
        "[RBFNetwork:inputCount=2,outputCount=1,RBFs=[GaussianFunction:width=0.00,center=0.00,0.00],]",
        net.ToString());
}
/// <summary>
/// Perform the example: train an RBF network on XOR using greedy random
/// search, then query it against the training data.
/// </summary>
public void Process()
{
    var trainingData = BasicData.ConvertArrays(XorInput, XorIdeal);
    var network = new RBFNetwork(2, 5, 1);
    var scoreFunction = new ScoreRegressionData(trainingData);
    var trainer = new TrainGreedyRandom(true, network, scoreFunction);

    PerformIterations(trainer, 1000000, 0.01, true);
    Query(network, trainingData);
}
/// <summary>
/// Prepare a Kaggle submission for Titanic: write a stats file describing
/// the cross-validation run, then a CSV of survived/perished predictions
/// for the test set.
/// </summary>
/// <param name="dataPath">The data path.</param>
/// <param name="bestNetwork">The best network.</param>
/// <param name="cross">The cross validated data.</param>
public void Submit(string dataPath, RBFNetwork bestNetwork, CrossValidate cross)
{
    // BUG FIX: the original used "new DateTime()" which is DateTime.MinValue,
    // so every submission got the identical timestamp. Use the current time.
    // NOTE(review): "hh" is the 12-hour clock; "HH" (24-hour) may be the
    // intent -- confirm.
    String now = DateTime.Now.ToString("yyyyMMddhhmm");

    string trainingPath = Path.Combine(dataPath, TitanicConfig.TrainingFilename);
    string testPath = Path.Combine(dataPath, TitanicConfig.TestFilename);
    var score = (int)(cross.Score * 10000);
    string submitPath = Path.Combine(dataPath, "submit-" + now + "_" + score + ".csv");
    string submitInfoPath = Path.Combine(dataPath, "submit-" + now + ".txt");

    // Write a companion info file with the cross-validation statistics.
    using (var file = new StreamWriter(submitInfoPath))
    {
        file.WriteLine("Crossvalidation stats:");
        for (int i = 0; i < cross.Count; i++)
        {
            CrossValidateFold fold = cross.Folds[i];
            file.WriteLine("Fold #" + (i + 1) + " : Score: " + fold.Score);
        }
        file.WriteLine("Average Score: " + cross.Score);
        file.WriteLine();
        file.WriteLine(String.Join(",", bestNetwork.LongTermMemory));
    }

    // Gather normalization statistics over both the training and test files.
    var stats = new TitanicStats();
    NormalizeTitanic.Analyze(stats, trainingPath);
    NormalizeTitanic.Analyze(stats, testPath);

    // Normalize the test data; "ids" receives the passenger ids in row order.
    var ids = new List<String>();
    IList<BasicData> training = NormalizeTitanic.Normalize(stats, testPath, ids,
        TitanicConfig.InputNormalizeLow,
        TitanicConfig.InputNormalizeHigh,
        TitanicConfig.PredictSurvive,
        TitanicConfig.PredictPerish);

    // Write the submission CSV: one prediction row per passenger.
    int idx = 0;
    using (var streamWriter = new StreamWriter(submitPath))
    using (var writer = new CsvWriter(streamWriter))
    {
        writer.WriteField("PassengerId");
        writer.WriteField("Survived");
        writer.NextRecord();
        foreach (BasicData data in training)
        {
            double[] output = bestNetwork.ComputeRegression(data.Input);
            // Threshold the regression output at 0.5 to get a class label.
            int survived = output[0] > 0.5 ? 1 : 0;
            writer.WriteField(ids[idx]);
            writer.WriteField(survived);
            writer.NextRecord();
            idx++;
        }
    }
}
/// <summary>
/// Build and train an RBF network from the simulated series.
/// </summary>
/// <param name="simulatedData">The source series.</param>
/// <param name="norm">Receives the normalization applied to the data.</param>
/// <returns>The trained network.</returns>
protected virtual RBFNetwork BuildNetwork(TimeSeries simulatedData, out NormalizeArray norm)
{
    double[] raw = GenerateData(simulatedData);
    double[] normalized = NormalizeData(raw, mNormalizedLow, mNormalzedHigh, out norm);

    RBFNetwork network = CreateNetwork();
    IMLDataSet trainingSet = GenerateTraining(normalized);
    Train(network, trainingSet);
    return network;
}
/// <summary>
/// Construct the training object.
/// </summary>
/// <param name="network_0">The network to train. Must have a single output neuron.</param>
/// <param name="training">The training data to use. Must be indexable.</param>
public SVDTraining(RBFNetwork network_0, IMLDataSet training)
    : base(TrainingImplementationType.OnePass)
{
    // SVD solves for exactly one output column; reject anything else early.
    if (network_0.OutputCount != 1)
    {
        throw new TrainingError("SVD requires an output layer with a single neuron.");
    }

    Training = training;
    network = network_0;
}
/// <summary>
/// Create a RBF network from an architecture string.
/// </summary>
/// <param name="architecture">The architecture string to use.</param>
/// <param name="input">The input count.</param>
/// <param name="output">The output count.</param>
/// <returns>The RBF network.</returns>
public IMLMethod Create(String architecture, int input, int output)
{
    IList<String> layers = ArchitectureParse.ParseLayers(architecture);
    if (layers.Count != MaxLayers)
    {
        throw new EncogError(
            "RBF Networks must have exactly three elements, "
            + "separated by ->.");
    }

    // Layer 0 is the input layer, layer 1 the RBF layer, layer 2 the output.
    ArchitectureLayer inputLayer = ArchitectureParse.ParseLayer(layers[0], input);
    ArchitectureLayer rbfLayer = ArchitectureParse.ParseLayer(layers[1], -1);
    ArchitectureLayer outputLayer = ArchitectureParse.ParseLayer(layers[2], output);

    // Resolve the RBF type from its (case-insensitive) name.
    String rbfName = rbfLayer.Name;
    RBFEnum rbfType;
    if (rbfName.Equals("Gaussian", StringComparison.InvariantCultureIgnoreCase))
    {
        rbfType = RBFEnum.Gaussian;
    }
    else if (rbfName.Equals("Multiquadric", StringComparison.InvariantCultureIgnoreCase))
    {
        rbfType = RBFEnum.Multiquadric;
    }
    else if (rbfName.Equals("InverseMultiquadric", StringComparison.InvariantCultureIgnoreCase))
    {
        rbfType = RBFEnum.InverseMultiquadric;
    }
    else if (rbfName.Equals("MexicanHat", StringComparison.InvariantCultureIgnoreCase))
    {
        rbfType = RBFEnum.MexicanHat;
    }
    else
    {
        throw new NeuralNetworkError("Unknown RBF: " + rbfName);
    }

    // The "C" parameter of the RBF layer holds the RBF neuron count.
    var holder = new ParamsHolder(rbfLayer.Params);
    int rbfCount = holder.GetInt("C", true, 0);

    return new RBFNetwork(inputLayer.Count, rbfCount, outputLayer.Count, rbfType);
}
/// <summary>
/// Verify that a new network starts with zeroed memory and that Reset
/// randomizes it to non-zero values.
/// </summary>
public void TestResetCompute()
{
    var net = new RBFNetwork(2, 1, 1);

    // Freshly constructed long-term memory must sum to zero.
    double total = net.LongTermMemory.Sum();
    Assert.AreEqual(0, total, AIFH.DefaultPrecision);

    // After a reset the memory should contain non-zero values.
    net.Reset(new BasicGenerateRandom());
    total += net.LongTermMemory.Sum();
    Assert.IsTrue(Math.Abs(total) > AIFH.DefaultPrecision);
}
/// <summary>
/// Train the network with SVD. SVD is a single-step solve, so the loop
/// body executes exactly once ("epoch &lt; 1" is false after the first pass).
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="trainingSet">The training data.</param>
private void Train(RBFNetwork network, IMLDataSet trainingSet)
{
    var solver = new SVDTraining(network, trainingSet);
    int epoch = 1;
    do
    {
        solver.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + solver.Error);
        epoch++;
    } while ((epoch < 1) && (solver.Error > mMaxError));
}
/// <summary>
/// Run the example: load and normalize the Iris data, train a swarm of RBF
/// networks with particle swarm optimization, then query the best particle.
/// </summary>
public void Process()
{
    // Read the iris data from the embedded resources.
    Assembly assembly = Assembly.GetExecutingAssembly();
    Stream res = assembly.GetManifestResourceStream("AIFH_Vol2.Resources.iris.csv");
    if (res == null)
    {
        Console.WriteLine("Can't read iris data from embedded resources.");
        return;
    }

    // Load the data.
    var reader = new StreamReader(res);
    DataSet ds = DataSet.Load(reader);
    reader.Close();

    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();

    // The following ranges are set up for the Iris data set. To normalize
    // other files, modify the calls below accordingly.
    ds.NormalizeRange(0, -1, 1);
    ds.NormalizeRange(1, -1, 1);
    ds.NormalizeRange(2, -1, 1);
    ds.NormalizeRange(3, -1, 1);
    IDictionary<string, int> species = ds.EncodeOneOfN(4);

    // Create the randomized particle swarm.
    var particles = new RBFNetwork[ParticleCount];
    for (int i = 0; i < particles.Length; i++)
    {
        particles[i] = new RBFNetwork(4, 4, 3);
        particles[i].Reset(rnd);
    }

    IList<BasicData> trainingData = ds.ExtractSupervised(0, 4, 4, 3);
    IScoreFunction score = new ScoreRegressionData(trainingData);

    var train = new TrainPSO(particles, score);
    PerformIterations(train, 100000, 0.05, true);

    var winner = (RBFNetwork)train.BestParticle;
    QueryOneOfN(winner, trainingData, species);
}
/// <summary>
/// Train an RBF network on XOR, persist it to an EG file, reload it, and
/// verify the reloaded copy still solves XOR.
/// </summary>
public void TestPersistEG()
{
    IMLDataSet xorData = XOR.CreateXORDataSet();

    var network = new RBFNetwork(2, 4, 1, RBFEnum.Gaussian);
    var training = new SVDTraining(network, xorData);
    training.Iteration();
    XOR.VerifyXOR(network, 0.1);

    // Round-trip the network through the persistence layer.
    EncogDirectoryPersistence.SaveObject(EG_FILENAME, network);
    var reloaded = (RBFNetwork)EncogDirectoryPersistence.LoadObject(EG_FILENAME);
    XOR.VerifyXOR(reloaded, 0.1);
}
/// <summary>
/// Create the feed-forward prediction network: WindowSize inputs, a linear
/// hidden layer of WindowSize/2 + 1 neurons with bias, and one output.
/// </summary>
/// <returns>The initialized, randomized network.</returns>
public BasicNetwork CreateNetwork()
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(WindowSize));
    network.AddLayer(new BasicLayer(new ActivationLinear(), true, WindowSize / 2 + 1));
    network.AddLayer(new BasicLayer(1));
    network.Structure.FinalizeStructure();
    network.Reset();
    // Fixed: the original also constructed an RBFNetwork here that was
    // never used; that dead local has been removed.
    return network;
}
/// <summary>
/// Create the feed-forward network: "inputs" input neurons, a linear hidden
/// layer of inputs/2 + 1 neurons with bias, and one output.
/// </summary>
/// <returns>The initialized, randomized network.</returns>
public BasicNetwork CreateNetwork()
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(inputs));
    network.AddLayer(new BasicLayer(new ActivationLinear(), true, inputs / 2 + 1));
    network.AddLayer(new BasicLayer(1));
    network.Structure.FinalizeStructure();
    network.Reset();
    // Fixed: the original also constructed an RBFNetwork here that was
    // never used; that dead local has been removed.
    return network;
}
/// <summary>
/// Handler for the "create network" button: read the network parameters
/// from the UI fields and construct a new RBF network.
/// (NOTE(review): the handler name contains a typo, "Networl"; it is kept
/// because it is presumably wired up in XAML.)
/// </summary>
private void CreateNetworl_Click(object sender, RoutedEventArgs e)
{
    // Parse the integer parameters from the text boxes.
    radialCount = Int32.Parse(RadialNeuronCount.Text);
    linearCount = Int32.Parse(LinearNeuronCount.Text);
    inputCount = Int32.Parse(InputVectorCount.Text);
    KNeighbors = Int32.Parse(KNeighborsCount.Text);

    // The scaling coefficient is parsed with invariant culture, so the
    // decimal separator is always '.'.
    scalingCoefficient = Double.Parse(ScalingCoefficientInput.Text, CultureInfo.InvariantCulture);

    Network = new RBFNetwork(
        new EuclideanDistance(),
        new GaussianRadialBasis(),
        new KNNWidthCalculator(new EuclideanDistance(), KNeighbors, scalingCoefficient),
        new RandomNeuronPositioner(),
        radialCount,
        linearCount,
        inputCount);
}
/// <summary>
/// Run the example: load and normalize the Iris data, then train an RBF
/// network with continuous ant colony optimization (ACO).
/// </summary>
public void Process()
{
    // Read the iris data from the resources.
    Assembly assembly = Assembly.GetExecutingAssembly();
    Stream res = assembly.GetManifestResourceStream("AIFH_Vol2.Resources.iris.csv");

    // Did we fail to read the resource?
    if (res == null)
    {
        Console.WriteLine("Can't read iris data from embedded resources.");
        return;
    }

    // Load the data. Fixed: the original called istream.Close() twice;
    // the duplicate call has been removed.
    var istream = new StreamReader(res);
    DataSet ds = DataSet.Load(istream);
    istream.Close();

    // The following ranges are setup for the Iris data set. If you wish to
    // normalize other files you will need to modify the below function calls.
    ds.NormalizeRange(0, -1, 1);
    ds.NormalizeRange(1, -1, 1);
    ds.NormalizeRange(2, -1, 1);
    ds.NormalizeRange(3, -1, 1);
    IDictionary<string, int> species = ds.EncodeOneOfN(4);

    var network = new RBFNetwork(4, 4, 3);

    IList<BasicData> trainingData = ds.ExtractSupervised(0, 4, 4, 3);
    IScoreFunction score = new ScoreRegressionData(trainingData);

    var train = new ContinuousACO(network, score, 30);
    PerformIterations(train, 100000, 0.05, true);
    train.FinishTraining();
    QueryOneOfN(network, trainingData, species);
}
/// <summary>
/// Forecast future values: the known series is copied into a working buffer,
/// and each forecast is fed back into the buffer so later windows can use it.
/// </summary>
/// <param name="network">The trained network.</param>
/// <param name="norm">The normalization used to scale inputs/outputs.</param>
/// <param name="simulatedData">The historical series.</param>
/// <param name="futureTimes">The time stamps to forecast.</param>
/// <returns>The forecast series, one value per future time stamp.</returns>
public TimeSeries Forecast(RBFNetwork network, NormalizeArray norm,
                           TimeSeries simulatedData, List<DateTime> futureTimes)
{
    int dataCount = simulatedData.Count;
    int futureCount = futureTimes.Count;

    // C# arrays are zero-initialized, so the future slots need no explicit
    // zero fill; the original's redundant fill loop has been removed.
    double[] data = new double[dataCount + futureCount];
    for (int idx = 0; idx < dataCount; ++idx)
    {
        data[idx] = simulatedData[idx];
    }

    var ts = new TimeSeries();
    for (int idx = 0; idx < futureCount; ++idx)
    {
        // Build the normalized window ending just before the slot being
        // forecast; positions before the series start contribute zero.
        var input = new BasicMLData(WindowSize);
        for (var i = 0; i < WindowSize; i++)
        {
            int idx2 = (dataCount + idx - WindowSize) + i;
            input[i] = idx2 < 0 ? 0 : norm.Stats.Normalize(data[idx2]);
        }

        IMLData output = network.Compute(input);
        double prediction = norm.Stats.DeNormalize(output[0]);

        // Feed the forecast back so subsequent windows can see it.
        data[dataCount + idx] = prediction;
        ts.Add(futureTimes[idx], prediction, false);
    }

    return ts;
}
/// <summary>
/// Verify the regression outputs and the classification result of a
/// 2-input, 1-RBF, 2-output network with hand-set weights.
/// </summary>
public void TestComputeClassification()
{
    var net = new RBFNetwork(2, 1, 2);

    double[] memory =
    {
        2.0, // input 1 to RBF 1
        2.0, // input 2 to RBF 1
        5.0, // RBF width
        2.0, // RBF, center-0
        4.0, // RBF, center-1
        3.0, // RBF1 to Output 1
        4.0, // Bias to Output 1
        5.0, // RBF1 to Output 2
        6.0  // Bias to Output 2
    };
    Array.Copy(memory, 0, net.LongTermMemory, 0, memory.Length);

    double[] input = { 1, 2 };
    double[] y = net.ComputeRegression(input);

    // Inputs: (2*1) + (2*2) = 6; RBF: Gaussian(6) = 1.
    // Output 1: (1*3) + (1*4) = 7.
    Assert.AreEqual(7, y[0], AIFH.DefaultPrecision);
    // Output 2: (1*5) + (1*6) = 11.
    Assert.AreEqual(11, y[1], AIFH.DefaultPrecision);

    // Class 1 scores higher than class 0.
    int cls = net.ComputeClassification(input);
    Assert.AreEqual(1, cls);
}
/// <summary>
/// The entry point for this example. If you would like to make this example
/// stand alone, then add to its own project and rename to Main.
/// </summary>
/// <param name="args">The first argument is the data directory path.</param>
public static void ExampleMain(string[] args)
{
    // Guard clause: a data directory path is required.
    if (args.Length == 0)
    {
        Console.WriteLine("Please provide your data directory path as the first argument.");
        return;
    }

    string dataPath = args[0];

    var fit = new FitTitanic();
    fit.Process(dataPath);

    RBFNetwork bestNetwork = fit.BestNetwork;
    var submit = new SubmitTitanic();
    submit.Submit(dataPath, bestNetwork, fit.Crossvalidation);
}
/// <summary>
/// Exercise the data-loading helpers and train a small RBF network on the
/// approximation data set.
/// </summary>
public void DataGettingTest()
{
    var dg = new DataGetter();
    var data = dg.GetData("approximation1.txt", ' ');
    var one = dg.GetTrainingDataWithOneOutput("approximation1.txt", 1);
    var test = dg.GetTrainingDataWithOneOutput("approximation_test.txt", 1);
    var two = dg.GetTrainingDataWithChosenInputs("classification.txt",
        new bool[] { true, true, true, true });

    // Removed: a stray leftover debug block that printed "test".

    var distCal = new EuclideanDistance();
    RBFNetwork network = new RBFNetwork(
        distCal,
        new GaussianRadialBasis(),
        new KNNWidthCalculator(distCal, 2, 1),
        new RandomNeuronPositioner(),
        2,
        one[0].DesiredOutput.Count,
        one[0].Input.Count);

    network.Train(new BackpropagationTrainingParameters(0.5, 20, 0, -1, 1, one), test);
    var output = network.ProcessInput(test[0].Input);
}
/// <inheritdoc/>
/// <summary>
/// Read an RBF network from an EG-format stream. The stream is consumed
/// section by section; recognized [RBF-NETWORK] subsections are copied into
/// the flat network of a freshly created RBFNetwork.
/// </summary>
public Object Read(Stream mask0)
{
    var result = new RBFNetwork();
    var flat = (FlatNetworkRBF)result.Flat;

    var ins0 = new EncogReadHelper(mask0);
    EncogFileSection section;
    while ((section = ins0.ReadNextSection()) != null)
    {
        // [RBF-NETWORK:PARAMS] -- free-form properties, copied verbatim.
        if (section.SectionName.Equals("RBF-NETWORK")
            && section.SubSectionName.Equals("PARAMS"))
        {
            IDictionary<String, String> paras = section.ParseParams();
            EngineArray.PutAll(paras, result.Properties);
        }
        // [RBF-NETWORK:NETWORK] -- the flat network structure and weights.
        if (section.SectionName.Equals("RBF-NETWORK")
            && section.SubSectionName.Equals("NETWORK"))
        {
            IDictionary<String, String> p = section.ParseParams();

            flat.BeginTraining = EncogFileSection.ParseInt(p, BasicNetwork.TagBeginTraining);
            flat.ConnectionLimit = EncogFileSection.ParseDouble(p, BasicNetwork.TagConnectionLimit);
            flat.ContextTargetOffset = EncogFileSection.ParseIntArray(
                p, BasicNetwork.TagContextTargetOffset);
            flat.ContextTargetSize = EncogFileSection.ParseIntArray(
                p, BasicNetwork.TagContextTargetSize);
            flat.EndTraining = EncogFileSection.ParseInt(p, BasicNetwork.TagEndTraining);
            flat.HasContext = EncogFileSection.ParseBoolean(p, BasicNetwork.TagHasContext);
            flat.InputCount = EncogFileSection.ParseInt(p, PersistConst.InputCount);
            flat.LayerCounts = EncogFileSection.ParseIntArray(p, BasicNetwork.TagLayerCounts);
            flat.LayerFeedCounts = EncogFileSection.ParseIntArray(p, BasicNetwork.TagLayerFeedCounts);
            flat.LayerContextCount = EncogFileSection.ParseIntArray(p, BasicNetwork.TagLayerContextCount);
            flat.LayerIndex = EncogFileSection.ParseIntArray(p, BasicNetwork.TagLayerIndex);
            flat.LayerOutput = section.ParseDoubleArray(p, PersistConst.Output);
            // LayerSums is runtime scratch space, sized to match LayerOutput.
            flat.LayerSums = new double[flat.LayerOutput.Length];
            flat.OutputCount = EncogFileSection.ParseInt(p, PersistConst.OutputCount);
            flat.WeightIndex = EncogFileSection.ParseIntArray(p, BasicNetwork.TagWeightIndex);
            flat.Weights = section.ParseDoubleArray(p, PersistConst.Weights);
            flat.BiasActivation = section.ParseDoubleArray(p, BasicNetwork.TagBiasActivation);
        }
        // [RBF-NETWORK:ACTIVATION] -- one activation function per layer,
        // loaded by reflection from the class name in column 0 followed by
        // its parameter values.
        else if (section.SectionName.Equals("RBF-NETWORK")
                 && section.SubSectionName.Equals("ACTIVATION"))
        {
            int index = 0;
            flat.ActivationFunctions = new IActivationFunction[flat.LayerCounts.Length];
            foreach (String line in section.Lines)
            {
                IActivationFunction af;
                IList<String> cols = EncogFileSection
                    .SplitColumns(line);
                String name = ReflectionUtil.AfPath + cols[0];
                try
                {
                    af = (IActivationFunction)ReflectionUtil.LoadObject(name);
                }
                catch (Exception e)
                {
                    // Wrap any reflection failure as a persistence error.
                    throw new PersistError(e);
                }
                // Remaining columns are the activation parameters, in order.
                for (int i = 0; i < af.ParamNames.Length; i++)
                {
                    af.Params[i] = CSVFormat.EgFormat.Parse(cols[i + 1]);
                }
                flat.ActivationFunctions[index++] = af;
            }
        }
        // [RBF-NETWORK:RBF] -- one radial basis function per hidden neuron:
        // class name, width, peak, then one center value per input.
        else if (section.SectionName.Equals("RBF-NETWORK")
                 && section.SubSectionName.Equals("RBF"))
        {
            int index = 0;
            // NOTE(review): counts are read from LayerCounts[1]/[2] --
            // presumably the flat layout stores layers output-first, making
            // [1] the hidden layer and [2] the input layer; confirm against
            // the writer.
            int hiddenCount = flat.LayerCounts[1];
            int inputCount = flat.LayerCounts[2];
            flat.RBF = new IRadialBasisFunction[hiddenCount];
            foreach (String line in section.Lines)
            {
                IRadialBasisFunction rbf;
                IList<String> cols = EncogFileSection
                    .SplitColumns(line);
                String name = ReflectionUtil.RBFPath + cols[0];
                try
                {
                    rbf = (IRadialBasisFunction)ReflectionUtil.LoadObject(name);
                }
                catch (TypeLoadException ex)
                {
                    throw new PersistError(ex);
                }
                catch (TargetException ex)
                {
                    throw new PersistError(ex);
                }
                catch (MemberAccessException ex)
                {
                    throw new PersistError(ex);
                }
                rbf.Width = CSVFormat.EgFormat.Parse(cols[1]);
                rbf.Peak = CSVFormat.EgFormat.Parse(cols[2]);
                rbf.Centers = new double[inputCount];
                // Centers start at column 3, one per input dimension.
                for (int i = 0; i < inputCount; i++)
                {
                    rbf.Centers[i] = CSVFormat.EgFormat.Parse(cols[i + 3]);
                }
                flat.RBF[index++] = rbf;
            }
        }
    }

    return(result);
}
/// <summary>
/// Train one fold using particle swarm optimization, stopping when the
/// validation score has not improved for a configured number of iterations.
/// Also updates the best network seen across all folds.
/// </summary>
/// <param name="k">The fold id (zero-based; displayed as k + 1).</param>
/// <param name="fold">The fold, providing training and validation sets.</param>
public void TrainFold(int k, CrossValidateFold fold)
{
    // Iterations since the validation score last improved (for this fold).
    int noImprove = 0;
    // Best validation score seen for this fold.
    double localBest = 0;

    // Get the training and cross validation sets.
    IList<BasicData> training = fold.TrainingSet;
    IList<BasicData> validation = fold.ValidationSet;

    // Create random particles for the RBF.
    IGenerateRandom rnd = new MersenneTwisterGenerateRandom();
    var particles = new RBFNetwork[TitanicConfig.ParticleCount];
    for (int i = 0; i < particles.Length; i++)
    {
        particles[i] = new RBFNetwork(TitanicConfig.InputFeatureCount, TitanicConfig.RbfCount, 1);
        particles[i].Reset(rnd);
    }

    // Construct a network to hold the best network across all folds
    // (created once, on the first fold trained).
    if (_bestNetwork == null)
    {
        _bestNetwork = new RBFNetwork(TitanicConfig.InputFeatureCount, TitanicConfig.RbfCount, 1);
    }

    // Setup the scoring functions: one over the training set (drives PSO),
    // one over the validation set (drives early stopping).
    IScoreFunction score = new ScoreTitanic(training);
    IScoreFunction scoreValidate = new ScoreTitanic(validation);

    // Setup particle swarm.
    bool done = false;
    var train = new TrainPSO(particles, score);
    int iterationNumber = 0;
    var line = new StringBuilder();
    do
    {
        iterationNumber++;
        train.Iteration();

        var best = (RBFNetwork)train.BestParticle;

        double trainingScore = train.LastError;
        double validationScore = scoreValidate.CalculateScore(best);

        // New overall best (by validation score): copy the particle's
        // weights into the shared best network.
        if (validationScore > _bestScore)
        {
            Array.Copy(best.LongTermMemory, 0, _bestNetwork.LongTermMemory, 0,
                       best.LongTermMemory.Length);
            _bestScore = validationScore;
        }

        // Track the per-fold best and the no-improvement counter.
        if (validationScore > localBest)
        {
            noImprove = 0;
            localBest = validationScore;
        }
        else
        {
            noImprove++;
        }

        // Build the per-iteration progress report.
        line.Length = 0;
        line.Append("Fold #");
        line.Append(k + 1);
        line.Append(", Iteration #");
        line.Append(iterationNumber);
        line.Append(": training correct: ");
        line.Append(trainingScore);
        line.Append(", validation correct: ");
        line.Append(validationScore);
        line.Append(", no improvement: ");
        line.Append(noImprove);

        // Stop once validation has stalled for the allowed number of
        // iterations.
        if (noImprove > TitanicConfig.AllowNoImprovement)
        {
            done = true;
        }

        Console.WriteLine(line.ToString());
    } while (!done);

    // Record this fold's best validation score.
    fold.Score = localBest;
}
/// <summary>
/// Creates and returns a new instance of RBF network.
/// </summary>
/// <param name="inputNeuronsCount"> number of neurons in input layer </param>
/// <param name="rbfNeuronsCount"> number of neurons in RBF layer </param>
/// <param name="outputNeuronsCount"> number of neurons in output layer </param>
/// <returns> instance of RBF network </returns>
public static RBFNetwork createRbfNetwork(int inputNeuronsCount, int rbfNeuronsCount, int outputNeuronsCount)
{
    return new RBFNetwork(inputNeuronsCount, rbfNeuronsCount, outputNeuronsCount);
}