Example 1
        /// <summary>
        /// Maps a LogisticFunctions enum value to a concrete activation-function
        /// instance. Returns null for unrecognized values.
        /// </summary>
        public static LogisticFunction GetLogisticFunction(LogisticFunctions logistic)
        {
            switch (logistic)
            {
            case LogisticFunctions.Sigmoid:
                return new Sigmoid();

            case LogisticFunctions.SoftPlus:
                return new SoftPlus();

            case LogisticFunctions.HypTan:
                return new HyperbolicTan();

            case LogisticFunctions.ArcTan:
                return new ArcTan();

            case LogisticFunctions.IdentityFunction:
                return new IdentityFunction();

            case LogisticFunctions.ReLF:
                return new RectifiedLinearUnit();

            case LogisticFunctions.SoftMax:
                return new SoftMax();

            default:
                return null;
            }
        }
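
The concrete types returned above are not shown in this listing. As a rough sketch of what one of them might look like, here is a hypothetical Sigmoid, assuming a LogisticFunction base class exposing the activation and its derivative (the base-class signature is an assumption, not taken from the source):

    using System;

    // Hypothetical base type; the real LogisticFunction signature is not
    // shown in these examples.
    public abstract class LogisticFunction
    {
        public abstract double Activate(double x);
        public abstract double Derivative(double x);
    }

    // Standard logistic sigmoid: f(x) = 1 / (1 + e^(-x)),
    // with derivative f'(x) = f(x) * (1 - f(x)).
    public class Sigmoid : LogisticFunction
    {
        public override double Activate(double x)
        {
            return 1.0 / (1.0 + Math.Exp(-x));
        }

        public override double Derivative(double x)
        {
            double f = Activate(x);
            return f * (1.0 - f);
        }
    }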
Example 2
        /// <summary>
        /// Returns true when the loss function is the canonical (matched) loss
        /// for the given output activation: Sigmoid with MLE, Identity with MSE,
        /// SoftMax with cross-entropy. Any other pairing returns false.
        /// </summary>
        public static bool IsCanonicalLink(LogisticFunctions logistic, LossFunctions loss)
        {
            switch (logistic)
            {
            case LogisticFunctions.Sigmoid:
                return loss == LossFunctions.MLE;

            case LogisticFunctions.IdentityFunction:
                return loss == LossFunctions.MSE;

            case LogisticFunctions.SoftMax:
                return loss == LossFunctions.CE;

            default:
                return false;
            }
        }
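
A pairing is canonical when the loss gradient with respect to the output layer's pre-activation reduces to (output - target), so backpropagation can skip multiplying by the activation derivative at the output layer. A hypothetical usage check, assuming these static helpers live on the BackPropagation class shown in this listing:

    // True: MLE (log-loss) is the canonical loss for a sigmoid output.
    bool sigmoidMle = BackPropagation.IsCanonicalLink(
        LogisticFunctions.Sigmoid, LossFunctions.MLE);

    // False: MSE is not the canonical loss for a softmax output.
    bool softMaxMse = BackPropagation.IsCanonicalLink(
        LogisticFunctions.SoftMax, LossFunctions.MSE);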
        /// <summary>
        /// Constructor for the three-layer backpropagation network.
        /// </summary>
        /// <param name="inputWeight">Input Weight.</param>
        /// <param name="outputWeight">Output Weight.</param>
        /// <param name="hiddenLayer">Hidden layer.</param>
        /// <param name="outputLayer">Output layer.</param>
        /// <param name="hiddenLogisticFunc">Hidden layer logistic function. Default = Sigmoid.</param>
        /// <param name="outputLogisticFunc">Output layer logistic function. Default = Sigmoid.</param>
        /// <param name="lossFunc">Output layer loss function. Default = MSE.</param>
        /// <param name="learnRate">Learning rate. Default = 0.01.</param>
        public BackPropagation(Matrix inputWeight, Matrix outputWeight, Matrix hiddenLayer, Matrix outputLayer,
                               LogisticFunctions hiddenLogisticFunc = LogisticFunctions.Sigmoid,
                               LogisticFunctions outputLogisticFunc = LogisticFunctions.Sigmoid,
                               LossFunctions lossFunc = LossFunctions.MSE, double learnRate = 0.01)
        {
            _inputWeight  = inputWeight;
            _outputWeight = outputWeight;
            _hiddenLayer  = hiddenLayer;
            _outputLayer  = outputLayer;

            HiddenLogisticFunc = hiddenLogisticFunc;
            OutputLogisticFunc = outputLogisticFunc;
            LossFunc           = lossFunc;

            _hiddenLogisticFunc = GetLogisticFunction(hiddenLogisticFunc);
            _outputLogisticFunc = GetLogisticFunction(outputLogisticFunc);
            _lossFunc           = GetLossFunction(lossFunc);

            LearnRate = learnRate;
        }
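
A minimal construction sketch for a 2-4-1 network. The Matrix(rows, columns) constructor is an assumption, since the Matrix type is not shown in this listing:

    // Hypothetical shapes for a 2-input, 4-hidden, 1-output network;
    // the Matrix constructor signature is assumed, not taken from the source.
    Matrix inputWeight  = new Matrix(4, 2);   // hidden x input
    Matrix outputWeight = new Matrix(1, 4);   // output x hidden
    Matrix hiddenLayer  = new Matrix(4, 1);
    Matrix outputLayer  = new Matrix(1, 1);

    var net = new BackPropagation(inputWeight, outputWeight, hiddenLayer, outputLayer,
                                  hiddenLogisticFunc: LogisticFunctions.HypTan,
                                  outputLogisticFunc: LogisticFunctions.Sigmoid,
                                  lossFunc: LossFunctions.MSE,
                                  learnRate: 0.05);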