Example #1
        //Constructor
        /// <summary>
        /// Constructs a new instance of the linear regression trainer
        /// </summary>
        /// <param name="net">FF network to be trained</param>
        /// <param name="inputVectorCollection">Predictors (input)</param>
        /// <param name="outputVectorCollection">Ideal outputs (the same number of rows as number of inputs)</param>
        /// <param name="maxEpoch">Maximum allowed training epochs</param>
        /// <param name="rand">Random object to be used for adding a white-noise to predictors</param>
        /// <param name="settings">Optional startup parameters of the trainer</param>
        public LinRegrTrainer(FeedForwardNetwork net,
                              List <double[]> inputVectorCollection,
                              List <double[]> outputVectorCollection,
                              int maxEpoch,
                              System.Random rand,
                              LinRegrTrainerSettings settings = null
                              )
        {
            //Check network readiness
            if (!net.Finalized)
            {
                throw new Exception("Can't create LinRegr trainer. Network structure was not finalized.");
            }
            //Check network conditions
            if (net.LayerCollection.Count != 1 || !(net.LayerCollection[0].Activation is Identity))
            {
                throw new Exception("Can´t create LinRegr trainer. Network structure is not complient (single layer having Identity activation).");
            }
            //Check training sample conditions
            if (inputVectorCollection.Count < inputVectorCollection[0].Length + 1)
            {
                throw new Exception("Can´t create LinRegr trainer. Insufficient number of training samples. Minimum is " + (inputVectorCollection[0].Length + 1).ToString() + ".");
            }
            //Parameters
            _settings = settings;
            if (_settings == null)
            {
                //Default parameters
                _settings = new LinRegrTrainerSettings();
            }
            _net = net;
            _inputVectorCollection           = inputVectorCollection;
            _outputVectorCollection          = outputVectorCollection;
            _outputSingleColMatrixCollection = new List <Matrix>(_net.NumOfOutputValues);
            for (int outputIdx = 0; outputIdx < _net.NumOfOutputValues; outputIdx++)
            {
                Matrix outputSingleColMatrix = new Matrix(_outputVectorCollection.Count, 1);
                for (int row = 0; row < _outputVectorCollection.Count; row++)
                {
                    //Output
                    outputSingleColMatrix.Data[row][0] = _outputVectorCollection[row][outputIdx];
                }
                _outputSingleColMatrixCollection.Add(outputSingleColMatrix);
            }
            _rand     = rand;
            _maxEpoch = maxEpoch;
            _epoch    = 0;
            _alphas   = new double[_maxEpoch];
            //Plan the iterations alphas
            double coeff = (maxEpoch > 1) ? _settings.MaxStretch / (maxEpoch - 1) : 0;

            for (int i = 0; i < _maxEpoch; i++)
            {
                _alphas[i] = _settings.HiNoiseIntensity - _settings.HiNoiseIntensity * Math.Tanh(i * coeff);
                _alphas[i] = Math.Max(0, _alphas[i]);
            }
            //Ensure the last alpha is zero
            _alphas[_maxEpoch - 1] = 0;
            return;
        }
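
The loop above plans a per-epoch noise amplitude: the intensity starts at HiNoiseIntensity and decays along a tanh curve whose steepness is governed by MaxStretch, ending at exactly zero in the last epoch. Below is a minimal standalone sketch of that schedule; the concrete values (1.0 for HiNoiseIntensity, 8 for MaxStretch, 10 epochs) are illustrative assumptions, not the trainer's defaults.

        //Sketch of the noise-intensity schedule planned in the constructor above
        //Assumed illustrative values, not the library defaults
        double hiNoiseIntensity = 1.0;
        double maxStretch       = 8;
        int    maxEpoch         = 10;
        double coeff = (maxEpoch > 1) ? maxStretch / (maxEpoch - 1) : 0;
        double[] alphas = new double[maxEpoch];
        for (int i = 0; i < maxEpoch; i++)
        {
            //Amplitude decays along the tanh curve and never drops below zero
            alphas[i] = Math.Max(0, hiNoiseIntensity - hiNoiseIntensity * Math.Tanh(i * coeff));
        }
        //The last epoch always runs without added noise
        alphas[maxEpoch - 1] = 0;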
Example #2
        /// <summary>
        /// Creates the instance and initializes it from the given xml element.
        /// This is the preferred way to instantiate feed forward network settings.
        /// </summary>
        /// <param name="elem">
        /// Xml data containing feed forward network settings.
        /// Content of xml element is always validated against the xml schema.
        /// </param>
        public FeedForwardNetworkSettings(XElement elem)
        {
            //Validation
            ElemValidator validator     = new ElemValidator();
            Assembly      assemblyRCNet = Assembly.GetExecutingAssembly();

            validator.AddXsdFromResources(assemblyRCNet, "RCNet.Neural.Network.FF.FeedForwardNetworkSettings.xsd");
            validator.AddXsdFromResources(assemblyRCNet, "RCNet.RCNetTypes.xsd");
            XElement feedForwardNetworkSettingsElem = validator.Validate(elem, "rootElem");

            //Parsing
            OutputLayerActivation = ActivationFactory.LoadSettings(feedForwardNetworkSettingsElem.Descendants().First());
            if (!IsAllowedActivation(OutputLayerActivation, out Interval outputRange))
            {
                throw new ApplicationException("Activation can't be used in FF network. Activation function has to be stateless and has to support derivative calculation.");
            }
            OutputRange      = outputRange;
            RegressionMethod = ParseTrainingMethodType(feedForwardNetworkSettingsElem.Attribute("regressionMethod").Value);
            //Hidden layers
            HiddenLayerCollection = new List <HiddenLayerSettings>();
            XElement hiddenLayersElem = feedForwardNetworkSettingsElem.Descendants("hiddenLayers").FirstOrDefault();

            if (hiddenLayersElem != null)
            {
                foreach (XElement layerElem in hiddenLayersElem.Descendants("layer"))
                {
                    HiddenLayerCollection.Add(new HiddenLayerSettings(layerElem));
                }
            }
            //Trainers
            LinRegrTrainerCfg = null;
            RPropTrainerCfg   = null;
            switch (RegressionMethod)
            {
            case TrainingMethodType.Linear:
                XElement linRegrTrainerElem = feedForwardNetworkSettingsElem.Descendants("linRegrTrainer").FirstOrDefault();
                if (linRegrTrainerElem != null)
                {
                    LinRegrTrainerCfg = new LinRegrTrainerSettings(linRegrTrainerElem);
                }
                else
                {
                    LinRegrTrainerCfg = new LinRegrTrainerSettings();
                }
                break;

            case TrainingMethodType.Resilient:
                XElement resPropTrainerElem = feedForwardNetworkSettingsElem.Descendants("resPropTrainer").FirstOrDefault();
                if (resPropTrainerElem != null)
                {
                    RPropTrainerCfg = new RPropTrainerSettings(resPropTrainerElem);
                }
                else
                {
                    RPropTrainerCfg = new RPropTrainerSettings();
                }
                break;
            }
            return;
        }
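
The constructor only consumes the XElement it is given; building that element is up to the caller. Below is a minimal sketch of the typical call, assuming the settings live in a file named FeedForwardNetworkSettings.xml (the file name is hypothetical) whose root element conforms to the FeedForwardNetworkSettings.xsd schema loaded above.

        //Requires System.Xml.Linq (XDocument, XElement)
        //Load the xml document and hand its root element to the constructor (sketch)
        XDocument doc = XDocument.Load("FeedForwardNetworkSettings.xml");
        FeedForwardNetworkSettings settings = new FeedForwardNetworkSettings(doc.Root);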
Example #3
 /// <summary>
 /// Deep copy constructor
 /// </summary>
 /// <param name="source">Source instance</param>
 public LinRegrTrainerSettings(LinRegrTrainerSettings source)
 {
     HiNoiseIntensity = source.HiNoiseIntensity;
     MaxStretch       = source.MaxStretch;
     ZeroMargin       = source.ZeroMargin;
     return;
 }
Example #4
 /// <summary>
 /// Deep copy constructor
 /// </summary>
 /// <param name="source">Source instance</param>
 public LinRegrTrainerSettings(LinRegrTrainerSettings source)
 {
     NumOfAttempts      = source.NumOfAttempts;
     NumOfAttemptEpochs = source.NumOfAttemptEpochs;
     HiNoiseIntensity   = source.HiNoiseIntensity;
     MaxStretch         = source.MaxStretch;
     ZeroMargin         = source.ZeroMargin;
     return;
 }
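
The two copy-constructor variants differ only in the set of copied properties (the second adds NumOfAttempts and NumOfAttemptEpochs); the copied properties appear to be simple numeric values, so the member-wise copy already yields an independent instance. A minimal usage sketch, assuming a default-constructed source:

     //Copy an existing settings instance via the copy constructor (sketch)
     LinRegrTrainerSettings original = new LinRegrTrainerSettings();
     LinRegrTrainerSettings copy     = new LinRegrTrainerSettings(original);
     //copy now holds the same values but is a fully independent instance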
Example #5
        //Methods
        /// <summary>
        /// See the base.
        /// </summary>
        public override bool Equals(object obj)
        {
            //Cast; a null reference or an instance of another type is never equal
            LinRegrTrainerSettings cmpSettings = obj as LinRegrTrainerSettings;

            if (cmpSettings == null)
            {
                return false;
            }
            if (HiNoiseIntensity != cmpSettings.HiNoiseIntensity ||
                MaxStretch != cmpSettings.MaxStretch ||
                ZeroMargin != cmpSettings.ZeroMargin
                )
            {
                return false;
            }
            return true;
        }
Example #6
        /// <summary>
        /// Creates a deep copy of this instance
        /// </summary>
        public LinRegrTrainerSettings DeepClone()
        {
            LinRegrTrainerSettings clone = new LinRegrTrainerSettings(this);

            return clone;
        }
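
DeepClone simply delegates to the copy constructor shown earlier, so a clone is a separate object that compares equal under the Equals override above. A minimal usage sketch, again assuming a default-constructed source:

        //Clone the settings and check value equality vs. reference identity (sketch)
        LinRegrTrainerSettings settings = new LinRegrTrainerSettings();
        LinRegrTrainerSettings clone    = settings.DeepClone();
        bool sameValues = settings.Equals(clone);              //true
        bool sameObject = ReferenceEquals(settings, clone);    //false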