Example #1
0
        /// <summary>
        /// Builds the readout layer configuration.
        /// </summary>
        /// <param name="testDataRatio">Specifies what part of available data to be used as test data</param>
        /// <param name="numOfAttempts">Number of regression attempts. Each readout network will try to learn numOfAttempts times</param>
        /// <param name="numOfEpochs">Number of training epochs within an attempt</param>
        ReadoutLayerSettings CreateReadoutLayerCfg(double testDataRatio, int numOfAttempts, int numOfEpochs)
        {
            //Each output field is predicted by a pair of networks.
            //Network 1: a single Identity output neuron, no hidden layers,
            //trained by the resilient back propagation trainer.
            var ffNet1Cfg = new FeedForwardNetworkSettings(
                new IdentitySettings(),
                null,
                new RPropTrainerSettings(numOfAttempts, numOfEpochs)
                );
            //Network 2: an Identity output neuron plus one hidden layer of 5 LeakyReLU neurons,
            //also trained by the resilient back propagation trainer.
            var ffNet2Cfg = new FeedForwardNetworkSettings(
                new IdentitySettings(),
                new HiddenLayersSettings(new HiddenLayerSettings(5, new LeakyReLUSettings())),
                new RPropTrainerSettings(numOfAttempts, numOfEpochs)
                );
            //Default networks to be used for the forecast task.
            var defaultNetworksCfg = new DefaultNetworksSettings(null, new ForecastNetworksSettings(ffNet1Cfg, ffNet2Cfg));
            //Readout units forecasting the next High and Low prices. Both fields are real numbers.
            var unitsCfg = new ReadoutUnitsSettings(
                new ReadoutUnitSettings("High", new ForecastTaskSettings(new RealFeatureFilterSettings())),
                new ReadoutUnitSettings("Low", new ForecastTaskSettings(new RealFeatureFilterSettings()))
                );
            //Compose and return the resulting readout layer configuration.
            return new ReadoutLayerSettings(unitsCfg,
                                            testDataRatio,
                                            ReadoutLayerSettings.AutoFolds,
                                            ReadoutLayerSettings.DefaultRepetitions,
                                            defaultNetworksCfg
                                            );
        }
Example #2
0
        /// <summary>
        /// Builds the readout layer configuration.
        /// </summary>
        /// <param name="foldDataRatio">Specifies what part of available data to be used as the fold data.</param>
        /// <param name="numOfAttempts">Number of regression attempts. Each readout network will try to learn numOfAttempts times.</param>
        /// <param name="numOfEpochs">Number of training epochs within an attempt.</param>
        ReadoutLayerSettings CreateReadoutLayerCfg(double foldDataRatio, int numOfAttempts, int numOfEpochs)
        {
            //Each output field is predicted by a pair of networks.
            //Network 1: a single Identity output neuron, no hidden layers,
            //trained by the resilient back propagation trainer.
            var ffNet1Cfg = new FeedForwardNetworkSettings(
                new AFAnalogIdentitySettings(),
                null,
                new RPropTrainerSettings(numOfAttempts, numOfEpochs)
                );
            //Network 2: an Identity output neuron plus one hidden layer of 5 LeakyReLU neurons,
            //also trained by the resilient back propagation trainer.
            var ffNet2Cfg = new FeedForwardNetworkSettings(
                new AFAnalogIdentitySettings(),
                new HiddenLayersSettings(new HiddenLayerSettings(5, new AFAnalogLeakyReLUSettings())),
                new RPropTrainerSettings(numOfAttempts, numOfEpochs)
                );
            //Cluster chain configuration for the forecast and the default configuration for the forecast task.
            var realClusterCfg = new TNRNetClusterRealSettings(new TNRNetClusterRealNetworksSettings(ffNet1Cfg, ffNet2Cfg),
                                                               new TNRNetClusterRealWeightsSettings()
                                                               );
            var realClusterChainCfg = new TNRNetClusterChainRealSettings(new CrossvalidationSettings(foldDataRatio),
                                                                         new TNRNetClustersRealSettings(realClusterCfg)
                                                                         );
            var taskDefaultsCfg = new TaskDefaultsSettings(null, realClusterChainCfg);
            //Readout units forecasting the next High and Low prices.
            var unitsCfg = new ReadoutUnitsSettings(new ReadoutUnitSettings("High", new ForecastTaskSettings()),
                                                    new ReadoutUnitSettings("Low", new ForecastTaskSettings())
                                                    );
            //Compose and return the resulting readout layer configuration.
            return new ReadoutLayerSettings(taskDefaultsCfg, unitsCfg, null);
        }
        /// <summary>
        /// Builds a feed forward network: input neurons, the configured hidden layers
        /// and the output neurons connected to the last hidden layer.
        /// </summary>
        /// <param name="settings">Overall network configuration.</param>
        /// <param name="hiddenSettings">Configuration of the hidden layers.</param>
        public static INetwork BuildFFN(NetworkSettings settings, HiddenLayerSettings hiddenSettings)
        {
            var network = new Network(settings);
            network.BuildInputNeurons(settings);
            network.HiddenLayers = network.BuildHiddenLayers(network.InputNeurons, hiddenSettings);
            //Output neurons are fed by the last hidden layer
            network.BuildOutputNeurons(network.HiddenLayers.Last(), settings.OutputNeuronsCount, settings.OutputLayerFunction);
            return network;
        }
Example #4
0
 /// <summary>
 /// Copy constructor.
 /// </summary>
 /// <param name="source">Source instance to copy from</param>
 public HiddenLayerSettings(HiddenLayerSettings source)
 {
     NumOfNeurons = source.NumOfNeurons;
     //Activation settings are deep cloned so the copies never share an instance
     Activation = source.Activation == null
                  ? null
                  : ActivationFactory.DeepCloneActivationSettings(source.Activation);
 }
Example #5
0
        /// <summary>
        /// Runs the example code.
        /// </summary>
        public void Run()
        {
            //Feed forward network configuration: Identity output layer, two LeakyReLU hidden layers
            //and the associated resilient back propagation trainer (2 attempts, 200 epochs each).
            const int NumOfHiddenNeurons = 3;
            var hiddenLayerCfg = new HiddenLayerSettings(NumOfHiddenNeurons, new LeakyReLUSettings());
            var netCfg = new FeedForwardNetworkSettings(new IdentitySettings(),
                                                        new HiddenLayersSettings(hiddenLayerCfg, hiddenLayerCfg),
                                                        new RPropTrainerSettings(2, 200)
                                                        );
            //Training data and the network instance (2 input values, 3 output values)
            VectorBundle trainingData = CreateTrainingData();
            var network = new FeedForwardNetwork(2, 3, netCfg);

            //Training phase
            _log.Write("Training");
            _log.Write("--------");
            var trainer = new RPropTrainer(network,
                                           trainingData.InputVectorCollection,
                                           trainingData.OutputVectorCollection,
                                           (RPropTrainerSettings)netCfg.TrainerCfg,
                                           new Random(0)
                                           );
            //Iterate until the trainer stops or the error is small enough
            while (trainer.Iteration() && trainer.MSE > 1e-6)
            {
                _log.Write($"  Attempt {trainer.Attempt} / Epoch {trainer.AttemptEpoch,3} Mean Squared Error = {Math.Round(trainer.MSE, 8).ToString(CultureInfo.InvariantCulture)}", false);
            }
            _log.Write(string.Empty);

            //Show what the trained network computes for every training input
            _log.Write("Trained network computations:");
            _log.Write("-----------------------------");
            foreach (double[] input in trainingData.InputVectorCollection)
            {
                double[] output = network.Compute(input);
                _log.Write($"  Input {input[0]} {input[1]} Results: AND={Math.Round(output[0])} OR={Math.Round(output[1])} XOR={Math.Round(output[2])}");
            }
            _log.Write(string.Empty);
        } //Run
Example #6
0
            //Methods
            /// <summary>
            /// See the base.
            /// </summary>
            public override bool Equals(object obj)
            {
                HiddenLayerSettings cmpSettings = obj as HiddenLayerSettings;

                //BUGFIX: the "as" cast yields null for any non-null object of an unrelated type,
                //so the original code threw NullReferenceException on the member access below.
                //Checking the cast result also covers the obj == null case.
                if (cmpSettings == null)
                {
                    return(false);
                }
                if (NumOfNeurons != cmpSettings.NumOfNeurons ||
                    !Equals(Activation, cmpSettings.Activation)
                    )
                {
                    return(false);
                }
                return(true);
            }
        /// <summary>
        /// Builds a recurrent network: input neurons, the configured hidden layers,
        /// output neurons, plus a second set of input neurons that each output neuron
        /// is connected back to pairwise.
        /// </summary>
        /// <param name="settings">Overall network configuration.</param>
        /// <param name="hiddenSettings">Configuration of the hidden layers.</param>
        public static INetwork BuildRNN(NetworkSettings settings, HiddenLayerSettings hiddenSettings)
        {
            var rnn = new Network(settings);

            rnn.BuildInputNeurons(settings);
            rnn.HiddenLayers = rnn.BuildHiddenLayers(rnn.InputNeurons, hiddenSettings);
            var outputNeurons   = rnn.BuildOutputNeurons(rnn.HiddenLayers.Last(), settings.OutputNeuronsCount, settings.OutputLayerFunction);
            var recurrentInputs = rnn.BuildInputNeurons(settings, 1);

            //Connect each output neuron to its pairwise recurrent input neuron
            var targetEnumerator = recurrentInputs.GetEnumerator();
            foreach (var sourceNeuron in outputNeurons)
            {
                targetEnumerator.MoveNext();
                rnn.ConnectAxon(sourceNeuron, targetEnumerator.Current, settings.OutputNeuronsCount);
            }

            return rnn;
        }
        /// <summary>
        /// Builds the chain of hidden layers. Every new neuron is connected to all
        /// neurons of the directly preceding layer.
        /// </summary>
        /// <param name="previousLayer">The layer feeding the first hidden layer.</param>
        /// <param name="settings">Hidden layer configuration (layer count, neurons per layer, activation, bias).</param>
        /// <param name="recurrentInputs">Number of recurrent inputs given to each hidden neuron.</param>
        internal virtual IList <ICollection <IHiddenNeuron> > BuildHiddenLayers(IEnumerable <INeuron> previousLayer, HiddenLayerSettings settings, ushort recurrentInputs = 0)
        {
            var tempLayer = new List <ICollection <IHiddenNeuron> >(settings.LayersCount);

            IEnumerable <INeuron> prevLayer = previousLayer;

            for (int i = 0; i < settings.LayersCount; i++)
            {
                var tempHidden = new List <IHiddenNeuron>(settings.NeuronsCount);
                for (int j = 0; j < settings.NeuronsCount; j++)
                {
                    var neuron = new HiddenNeuron(settings.FunctionType, recurrentInputs);
                    tempHidden.Add(neuron);
                    //Fully connect the new neuron to the directly preceding layer
                    foreach (var inNeuron in prevLayer)
                    {
                        ConnectAxon(inNeuron, neuron, settings.NeuronsCount);
                    }
                }

                if (settings.HasBiasNeuron)
                {
                    var biasNeuron = new BiasNeuron();

                    tempHidden.Add(biasNeuron);

                    //BUGFIX: connect the bias neuron to the directly preceding layer (prevLayer),
                    //consistently with the regular neurons above. The original code iterated
                    //previousLayer (the original input layer), which only coincides with
                    //prevLayer for the first hidden layer.
                    foreach (var inNeuron in prevLayer)
                    {
                        ConnectAxon(inNeuron, biasNeuron, settings.NeuronsCount);
                    }
                }
                //The just-built layer feeds the next one
                prevLayer = tempHidden;
                tempLayer.Add(tempHidden);
            }

            return(tempLayer);
        }
        /// <summary>
        /// Builds an LSTM network: input neurons, optional hidden layers before the LSTM cells,
        /// the LSTM cell layers and optional hidden layers after them.
        /// </summary>
        /// <param name="settings">Overall network configuration.</param>
        /// <param name="lstmSettings">Configuration of the LSTM cell layers.</param>
        /// <param name="prevHiddenSettings">Configuration of the hidden layers placed before the LSTM cells (may be null).</param>
        /// <param name="lastHiddenSettings">Configuration of the hidden layers placed after the LSTM cells (may be null).</param>
        public static INetwork BuildLSTM(NetworkSettings settings, CellLayerSettings lstmSettings, HiddenLayerSettings prevHiddenSettings, HiddenLayerSettings lastHiddenSettings)
        {
            var lstm = new Network(settings);

            IEnumerable <INeuron> prevLayer = lstm.BuildInputNeurons(settings);

            //Optional hidden layers in front of the LSTM cells
            if (prevHiddenSettings != null)
            {
                lstm.HiddenLayers = lstm.BuildHiddenLayers(prevLayer, prevHiddenSettings);
            }

            if (lstm.HiddenLayers == null)
            {
                //No hidden layers yet: the LSTM cells are fed directly by the input neurons
                lstm.HiddenLayers = new List <ICollection <IHiddenNeuron> >();
                prevLayer         = lstm.InputNeurons;
            }
            else
            {
                prevLayer = lstm.HiddenLayers.Last();
            }

            //LSTM cell layers; each layer feeds the next
            foreach (var cellLayer in lstm.BuildLSTMCells(prevLayer, lstmSettings))
            {
                lstm.HiddenLayers.Add(cellLayer);
                prevLayer = cellLayer;
            }

            //Optional hidden layers behind the LSTM cells
            if (lastHiddenSettings != null)
            {
                foreach (var layer in lstm.BuildHiddenLayers(prevLayer, lastHiddenSettings))
                {
                    lstm.HiddenLayers.Add(layer);
                }
            }

            return lstm;
        }