Example #1
        public Network(BaseCost costSettings, BaseOptimizer optimizerSettings)
        {
            IsLocked = false;
            Layers   = new List<BaseLayerKernel>();

            // Cost Function Setup
            CostFunction = costSettings.Type() switch
            {
                ECostType.CrossEntropyCost => new CrossEntropyCostKernel((CrossEntropyCost)costSettings),
                ECostType.ExponentionalCost => new ExponentialCostKernel((ExponentionalCost)costSettings),
                ECostType.GeneralizedKullbackLeiblerDivergence => new GeneralizedKullbackLeiblerDivergenceKernel((GeneralizedKullbackLeiblerDivergence)costSettings),
                ECostType.HellingerDistance => new HellingerDistanceKernel((HellingerDistance)costSettings),
                ECostType.ItakuraSaitoDistance => new ItakuraSaitoDistanceKernel((ItakuraSaitoDistance)costSettings),
                ECostType.KullbackLeiblerDivergence => new KullbackLeiblerDivergenceKernel((KullbackLeiblerDivergence)costSettings),
                ECostType.QuadraticCost => new QuadraticCostKernel((QuadraticCost)costSettings),
                _ => throw new ArgumentException("Cost Type Invalid.")
            };

            // Optimizer Function Setup
            OptimizerFunction = optimizerSettings.Type() switch
            {
                EOptimizerType.AdaDelta => new AdaDeltaKernel((AdaDelta)optimizerSettings),
                EOptimizerType.AdaGrad => new AdaGradKernel((AdaGrad)optimizerSettings),
                EOptimizerType.Adam => new AdamKernel((Adam)optimizerSettings),
                EOptimizerType.Adamax => new AdamaxKernel((Adamax)optimizerSettings),
                EOptimizerType.GradientDescent => new GradientDescentKernel((GradientDescent)optimizerSettings),
                EOptimizerType.Momentum => new MomentumKernel((Momentum)optimizerSettings),
                EOptimizerType.Nadam => new NadamKernel((Nadam)optimizerSettings),
                EOptimizerType.NesterovMomentum => new NesterovMomentumKernel((NesterovMomentum)optimizerSettings),
                EOptimizerType.RmsProp => new RmsPropKernel((RmsProp)optimizerSettings),
                _ => throw new ArgumentException("Optimizer Type Invalid.")
            };
        }
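A minimal construction sketch for this class; the parameterless QuadraticCost and GradientDescent constructors are assumptions, since the actual settings classes may require arguments:

        // Hypothetical usage; parameterless QuadraticCost and
        // GradientDescent constructors are assumed here.
        var network = new Network(new QuadraticCost(), new GradientDescent());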
Example #2
        /// <summary>
        /// Creates an instance of the neural net with the given training components.
        /// </summary>
        /// <param name="optimizer">The optimizer used to update the network parameters.</param>
        /// <param name="cost">The cost function minimized during training.</param>
        /// <param name="metric">Optional metric evaluated during training.</param>
        public NeuralNet(BaseOptimizer optimizer, BaseCost cost, BaseMetric metric = null)
        {
            Layers          = new List<BaseLayer>();
            TrainingLoss    = new List<double>();
            TrainingMetrics = new List<double>();

            this.Optimizer = optimizer ?? throw new Exception("Need optimizer");
            this.Cost      = cost ?? throw new Exception("Need cost");
            Metric         = metric;
        }
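A usage sketch for this constructor, borrowing the Adam and QuadraticCost names from Example #1 and assuming they have parameterless constructors:

        // Hypothetical usage; constructor signatures are assumed.
        var net = new NeuralNet(new Adam(), new QuadraticCost());

        // Passing null for optimizer or cost throws immediately, e.g.
        // new NeuralNet(null, new QuadraticCost()) -> "Need optimizer".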
Example #3
        /// <summary>
        /// Configures the model for training.
        /// </summary>
        /// <param name="optimizer">The optimizer function name used for training the model.</param>
        /// <param name="loss">The name of the loss function to be minimized during training.</param>
        /// <param name="metric">The name of the metric to be evaluated by the model during training and testing.</param>
        /// <param name="regulizer">The regularizer instance used to apply a penalty to the layers' parameters.</param>
        public void Compile(BaseOptimizer optimizer, string loss, string metric = "", Regulizers regulizer = null)
        {
            CompileModel();

            learners.Add(optimizer.Get(modelOut, regulizer));
            lossName = loss;
            lossFunc = Losses.Get(loss, labelVariable, modelOut);
            if (!string.IsNullOrWhiteSpace(metric))
            {
                metricName = metric;
                metricFunc = Metrics.Get(metric, labelVariable, modelOut);
            }
            else
            {
                metricName = loss;
                metricFunc = lossFunc;
            }
        }
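A hypothetical call against this signature; the model instance, the SGD constructor, and the name strings passed to Losses.Get and Metrics.Get are all assumptions:

        // Hypothetical usage; the optimizer type and the loss/metric
        // name strings are assumptions, not confirmed API values.
        model.Compile(new SGD(), loss: "cross_entropy", metric: "accuracy");

        // Omitting metric makes the loss double as the reported metric.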
Example #4
        public override void Initialize(BaseOptimizer<U> optimizer)
        {
            wOpt = optimizer.Clone();
            bOpt = optimizer.Clone();

            // Fan-in scaled uniform initialization bound for the weights.
            double lim = 3.0 / Math.Sqrt(InputShape[0]);

            var w0 = ND.Uniform(-lim, lim, InputShape[0], OutputShape[0]).Cast<U>();
            var b0 = ND.Zeros<U>(1, OutputShape[0]);

            paramsCount = w0.Count + b0.Count;

            weights = Variable.CreateNDarray<U>("w", w0);
            biases  = Variable.CreateNDarray<U>("b", b0);
            wT      = Variable.CreateNDarray<U>("wT");

            // Forward pass: y = x * W + b.
            fwExpr = ND.Dot(xFw, weights) + biases;

            // Backward pass: gradients w.r.t. weights, biases, and the layer input.
            gwExpr = ND.Dot(layInp.T, agBw);
            gbExpr = agBw.Sum(0, true);
            bwExpr = ND.Dot(agBw, wT);
        }
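The bound of 3.0 / Math.Sqrt(InputShape[0]) shrinks the initial weights as the layer's fan-in grows, keeping the scale of early activations roughly independent of layer width. This is in the spirit of LeCun/Glorot-style fan-in scaled uniform initialization, though the exact constant here is the author's choice.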
Example #5
 public Network(BaseOptimizer<U> optimizer, BaseLoss<U> lossf, BaseAccuracy<U> accuracy)
 {
     this.optimizer = optimizer;
     this.lossf     = lossf;
     this.accuracy  = accuracy;
 }
Example #6
 public void Compile(BaseOptimizer optimizer, LossType loss, MetricType metric)
 {
     OptimizerFn = optimizer;
     LossFn      = BaseLoss.Get(loss);
     MetricFn    = BaseMetric.Get(metric);
 }
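A hypothetical call against this method, reusing the BaseOptimizer.Get factory from Example #8; the LossType and MetricType member names are assumptions:

 // Hypothetical usage; the enum member names are assumed.
 net.Compile(BaseOptimizer.Get("adam"), LossType.BinaryCrossEntropy, MetricType.BinaryAccuracy);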
Example #7
 public override void Initialize(BaseOptimizer<U> optimizer)
 {
     // No trainable parameters in this layer; nothing to initialize.
 }
Example #8
        static void Main(string[] args)
        {
            Operations K = new Operations();

            //Load values into the tensor
            NDArray x = new NDArray(3, 3);

            x.Load(2, 4, 6, 1, 3, 5, 2, 3, 5);
            x.Print("Load X Values");

            NDArray y = new NDArray(3, 1);

            y.Load(1, 0, 1);
            y.Print("Load Y Values");

            //Create two layers, one with 6 neurons and another with 1
            FullyConnected fc1 = new FullyConnected(3, 6, "relu");
            FullyConnected fc2 = new FullyConnected(6, 1, "sigmoid");

            //Connect input by passing data from one layer to another
            fc1.Forward(x);
            fc2.Forward(fc1.Output);
            var preds = fc2.Output;

            preds.Print("Predictions");

            //Calculate the binary cross-entropy cost between the predicted and expected values
            BaseCost cost       = new BinaryCrossEntropy();
            var      costValues = cost.Forward(preds, y);

            costValues.Print("BCE Cost");

            //Calculate the binary accuracy metric for the predicted vs expected values
            BaseMetric metric       = new BinaryAccuacy();
            var        metricValues = metric.Calculate(preds, y);

            metricValues.Print("Acc Metric");

            var grad = cost.Backward(preds, y);

            fc2.Backward(grad);
            fc1.Backward(fc2.InputGrad);

            Console.WriteLine("Param value for FC1 before ADAM optimization");
            fc1.PrintParams(printGrads: false);

            //Initialise the ADAM optimizer with its default learning rate of 0.01
            BaseOptimizer optimizer = BaseOptimizer.Get("adam");

            //Change the learning rate to see the jump in weight changes.
            //optimizer.LearningRate = 0.1;

            //Apply the optimizer to the first layer for the first iteration
            optimizer.Update(1, fc1);
            Console.WriteLine("Param value for FC1 after ADAM optimization");
            fc1.PrintParams(printGrads: false);

            Console.WriteLine("Param value for FC2 before ADAM optimization");
            fc2.PrintParams(printGrads: false);

            //Apply the optimizer to the second layer for the first iteration
            optimizer.Update(1, fc2);
            Console.WriteLine("Param value for FC2 after ADAM optimization");
            fc2.PrintParams(printGrads: false);

            Console.ReadLine();
        }
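The single forward/backward/update pass above generalizes to a training loop. This sketch only rearranges calls already shown; the iteration index passed to Update presumably feeds Adam's bias-corrected moment estimates:

            // Hypothetical loop built from the calls above.
            for (int i = 1; i <= 100; i++)
            {
                fc1.Forward(x);
                fc2.Forward(fc1.Output);

                var grads = cost.Backward(fc2.Output, y);
                fc2.Backward(grads);
                fc1.Backward(fc2.InputGrad);

                optimizer.Update(i, fc1);
                optimizer.Update(i, fc2);
            }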
Example #9
 public abstract void Initialize(BaseOptimizer<U> optimizer);