Example #1
    public static Pixbuf Get(ManagedCNN cnn, int layer)
    {
        if (layer >= 0 && layer < cnn.Layers.Count && cnn.Layers[layer].Type == LayerTypes.Convolution)
        {
            // Transpose the bias so it is rendered in the expected orientation
            var Transposed = new ManagedArray(cnn.Layers[layer].Bias);
            ManagedMatrix.Transpose(Transposed, cnn.Layers[layer].Bias);

            var pixbuf = new Pixbuf(Colorspace.Rgb, false, 8, Transposed.x, Transposed.y);

            // Get normalization values
            double min = Double.MaxValue;
            double max = Double.MinValue;

            FullyConnected.GetNormalization(Transposed, ref min, ref max);

            Activation.Draw(pixbuf, Transposed, min, max);

            ManagedOps.Free(Transposed);

            return(pixbuf);
        }

        // return empty pixbuf
        return(new Pixbuf(Colorspace.Rgb, false, 8, 1, 1));
    }
Example #2
    public static Pixbuf Get(ManagedCNN cnn, int layer, int i, int j)
    {
        if (layer >= 0 && layer < cnn.Layers.Count && cnn.Layers[layer].Type == LayerTypes.Convolution && i >= 0 && i < cnn.Layers[layer].FeatureMap.i && j >= 0 && j < cnn.Layers[layer].FeatureMap.j)
        {
            var FeatureMap = new ManagedArray(cnn.Layers[layer].FeatureMap.x, cnn.Layers[layer].FeatureMap.y, cnn.Layers[layer].FeatureMap.z);
            var Transposed = new ManagedArray(FeatureMap);
            var pixbuf     = new Pixbuf(Colorspace.Rgb, false, 8, FeatureMap.y, FeatureMap.x);

            // Extract the (i, j) slice of the 4D feature map into 2D, then transpose it for drawing
            ManagedOps.Copy4DIJ2D(FeatureMap, cnn.Layers[layer].FeatureMap, i, j);
            ManagedMatrix.Transpose(Transposed, FeatureMap);

            // Get normalization values
            double min = Double.MaxValue;
            double max = Double.MinValue;

            FullyConnected.GetNormalization(Transposed, ref min, ref max);

            Activation.Draw(pixbuf, Transposed, min, max);

            // Release both temporary buffers
            ManagedOps.Free(FeatureMap);
            ManagedOps.Free(Transposed);

            return(pixbuf);
        }

        // return empty pixbuf
        return(new Pixbuf(Colorspace.Rgb, false, 8, 1, 1));
    }
Example #3
        private Layer ConvertFullyConnected(tflite.Operator op)
        {
            var inputs  = op.GetInputsArray();
            var input   = _graph.Tensors(inputs[0]).Value;
            var options = op.BuiltinOptions <tflite.FullyConnectedOptions>().Value;
            var weights = _graph.Tensors(inputs[1]).Value;
            var bias    = _graph.Tensors(inputs[2]).Value;

            // 4D inputs with non-unit spatial dimensions must be flattened before the fully connected layer
            if (input.ShapeLength == 4 && (input.Shape(1) != 1 || input.Shape(2) != 1))
            {
                var flatten = new TensorflowFlatten(input.GetShapeArray().ToNCHW());
                var layer   = new FullyConnected(flatten.Output.Dimensions, _model.GetTensor <float>(weights), _model.GetTensor <float>(bias),
                                                 options.FusedActivationFunction.ToActivationFunction());
                layer.Input.SetConnection(flatten.Output);
                _inputs.Add(flatten.Input, inputs[0]);
                _outputs.Add(op.Outputs(0), layer.Output);
                return(layer);
            }
            else
            {
                var layer = new FullyConnected(input.GetShapeArray().ToNCHW(), _model.GetTensor <float>(weights), _model.GetTensor <float>(bias),
                                               options.FusedActivationFunction.ToActivationFunction());
                _inputs.Add(layer.Input, inputs[0]);
                _outputs.Add(op.Outputs(0), layer.Output);
                return(layer);
            }
        }
Example #4
        public static Model ConvolutionalNeuralNetworkModel()
        {
            var images = Variable <float>();
            var labels = Variable <float>();

            ILayer <float> net = new Reshape <float>(images, PartialShape.Create(-1, 1, 28, 28));

            net = new Convolution2D <float>(net.Output, 5, 5, 16);
            net = new ActivationReLU <float>(net.Output);
            net = new Pooling2D <float>(net.Output, PoolingMode.MAX, 2, 2, 2, 2);

            net = new Convolution2D <float>(net.Output, 5, 5, 32);
            net = new ActivationTanh <float>(net.Output);
            net = new Pooling2D <float>(net.Output, PoolingMode.MAX, 2, 2, 2, 2);

            // Flatten every dimension after the batch axis into one for the fully connected layers
            net = new Reshape <float>(net.Output, PartialShape.Create(-1, net.Output.Shape.Skip(1).Aggregate(ScalarOps.Mul)));
            net = new FullyConnected <float>(net.Output, 50);
            net = new ActivationTanh <float>(net.Output);
            net = new FullyConnected <float>(net.Output, 10);

            return(new Model {
                Loss = new SoftmaxCrossEntropy <float>(net.Output, labels),
                Images = images,
                Labels = labels
            });
        }
Example #5
        public void Infer(FullyConnected layer, FullyConnectedLayerArgument argument, InferenceContext context)
        {
            // Look up the main-memory allocations assigned to this layer's input and output
            var inputAlloc = context.MainMemoryMap[layer.Input.Connection.From];
            var outputAlloc = context.MainMemoryMap[layer.Output];

            // Record their addresses in the layer argument
            argument.MainMemoryInputAddress = inputAlloc.GetAddress();
            argument.MainMemoryOutputAddress = outputAlloc.GetAddress();
        }
Example #6
 public FullyConnectedLayerArgument Convert(FullyConnected layer, ConvertContext context)
 {
     return new FullyConnectedLayerArgument
     {
         InputChannels = (uint)layer.Input.Dimensions[1],
         OutputChannels = (uint)layer.Output.Dimensions[1],
         Activation = layer.FusedActivationFunction,
         Weights = layer.Weights.ToArray(),
         Bias = layer.Bias.ToArray()
     };
 }
Example #7
        private Layer ConvertMul(paddle.OpDesc op)
        {
            var x      = GetParameter(op.Inputs, "X").Arguments[0];
            var y      = GetParameter(op.Inputs, "Y").Arguments[0];
            var output = GetParameter(op.Outputs, "Out").Arguments[0];

            // Load the Y variable as the weight matrix, transposed into the layout
            // FullyConnected expects; Mul has no bias and no fused activation
            var layer = new FullyConnected(GetVarShape(x), LoadVarData <float>(y).Transpose(new[] { 1, 0 }), null, ActivationFunctionType.Linear);

            _inputs.Add(layer.Input, x);
            _outputs.Add(output, layer.Output);
            return(layer);
        }
Example #8
        public static Symbol CreateLenet()
        {
            Symbol data       = Symbol.Variable("data");
            Symbol data_label = Symbol.Variable("data_label");

            // first conv

            //  mx.symbol.Convolution(data = data, kernel = (5, 5), num_filter = 20)
            Symbol conv1 = new Convolution(new Shape(5, 5), 20).CreateSymbol(data);
            // tanh1 = mx.symbol.Activation(data=conv1, act_type="tanh")
            Symbol tanh1 = new Activation().CreateSymbol(conv1);
            // pool1 = mx.symbol.Pooling(data=tanh1, pool_type="max", kernel = (2,2), stride = (2,2))
            Symbol pool1 = new Pooling(new Shape(2, 2), new Shape(2, 2)).CreateSymbol(tanh1);
            // second conv
            // conv2 = mx.symbol.Convolution(data=pool1, kernel=(5,5), num_filter=50)
            Symbol conv2 = new Convolution(new Shape(5, 5), 50).CreateSymbol(pool1);
            // tanh2 = mx.symbol.Activation(data=conv2, act_type="tanh")
            Symbol tanh2 = new Activation().CreateSymbol(conv2);
            // pool2 = mx.symbol.Pooling(data=tanh2, pool_type="max", kernel = (2,2), stride = (2,2))
            Symbol pool2 = new Pooling(new Shape(2, 2), new Shape(2, 2)).CreateSymbol(tanh2);

            // first fullc
            // flatten = mx.symbol.Flatten(data=pool2)
            Symbol flatten = new Flatten().CreateSymbol(pool2);
            // fc1 = mx.symbol.FullyConnected(data=flatten, num_hidden=500)
            Symbol fc1 = new FullyConnected(500).CreateSymbol(flatten);
            // tanh3 = mx.symbol.Activation(data=fc1, act_type="tanh")
            Symbol tanh3 = new Activation().CreateSymbol(fc1);

            // second fullc
            // fc2 = mx.symbol.FullyConnected(data=tanh3, num_hidden=num_classes)
            Symbol fc2 = new FullyConnected(10).CreateSymbol(tanh3);

            // loss
            // lenet = mx.symbol.SoftmaxOutput(data=fc2, name='softmax')
            Symbol lenet = new SoftmaxOutput().CreateSymbol(fc2, data_label);

            System.IO.File.WriteAllText("lenet.json", lenet.ToJSON());

            foreach (var item in lenet.ListAuxiliaryStates())
            {
                Console.WriteLine(item);
            }

            return(lenet);
        }
Example #9
        public static Model MultiLayerPerceptronModel()
        {
            var            images = Variable <float>(PartialShape.Create(-1, 28 * 28));
            ILayer <float> net    = new FullyConnected <float>(images, 128);

            net = new ActivationReLU <float>(net.Output);
            net = new FullyConnected <float>(net.Output, 64);
            net = new ActivationReLU <float>(net.Output);
            net = new FullyConnected <float>(net.Output, 10);
            var labels = Variable <float>(PartialShape.Create(-1, 10));

            return(new Model {
                Loss = new SoftmaxCrossEntropy <float>(net.Output, labels),
                Images = images,
                Labels = labels
            });
        }
Example #10
        static void Main(string[] args)
        {
            Operations K = new Operations();

            //Load array to the tensor
            NDArray x = new NDArray(3, 3);

            x.Load(2, 4, 6, 1, 3, 5, 2, 3, 5);
            x.Print("Load X Values");

            NDArray y = new NDArray(3, 1);

            y.Load(1, 0, 1);
            y.Print("Load Y Values");

            //Create two layers, one with 6 neurons and another with 1
            FullyConnected fc1 = new FullyConnected(3, 6, "relu");
            FullyConnected fc2 = new FullyConnected(6, 1, "sigmoid");

            //Connect input by passing data from one layer to another
            fc1.Forward(x);
            fc2.Forward(fc1.Output);
            var preds = fc2.Output;

            preds.Print("Predictions");

            //Calculate the binary cross-entropy cost between the predicted and expected values
            BaseCost cost       = new BinaryCrossEntropy();
            var      costValues = cost.Forward(preds, y);

            costValues.Print("BCE Cost");

            //Calculate the binary accuracy metric for the predicted vs expected values
            BaseMetric metric       = new BinaryAccuacy();
            var        metricValues = metric.Calculate(preds, y);

            metricValues.Print("Acc Metric");

            var grad = cost.Backward(preds, y);

            fc2.Backward(grad);
            fc1.Backward(fc2.InputGrad);
            fc1.PrintParams();

            Console.ReadLine();
        }
Example #11
    private void DrawFullyConnectedLayers()
    {
        var featurevector = FullyConnected.Get(cnn.FeatureVector);
        var output        = FullyConnected.Get(cnn.Output);
        var weights       = FullyConnected.Get(cnn.Weights, false);
        var bias          = FullyConnected.Get(cnn.Bias, false);

        if (IsActivated)
        {
            RenderScaled(FeatureVector, featurevector);
            RenderScaled(Output, output);
            RenderScaled(Weights, weights);
            RenderScaled(NetworkBias, bias);
        }

        Throw(featurevector, output, weights, bias);
    }
Example #12
        static void Main(string[] args)
        {
            Operations K = new Operations();

            //Load array to the tensor
            NDArray x = new NDArray(3, 3);

            x.Load(2, 4, 6, 1, 3, 5, 2, 3, 5);
            x.Print("Load X Values");

            NDArray y = new NDArray(3, 1);

            y.Load(20, 15, 15);
            y.Print("Load Y Values");

            //Create two layers, one with 6 neurons and another with 1
            FullyConnected fc1 = new FullyConnected(3, 6, "relu");
            FullyConnected fc2 = new FullyConnected(6, 1, "relu");

            //Connect input by passing data from one layer to another
            fc1.Forward(x);
            fc2.Forward(fc1.Output);
            var preds = fc2.Output;

            preds.Print("Predictions");

            //Calculate the mean square error cost between the predicted and expected values
            BaseCost cost       = new MeanSquaredError();
            var      costValues = cost.Forward(preds, y);

            costValues.Print("MSE Cost");

            //Calculate the mean absolute metric value for the predicted vs expected values
            BaseMetric metric       = new MeanAbsoluteError();
            var        metricValues = metric.Calculate(preds, y);

            metricValues.Print("MAE Metric");

            Console.ReadLine();
        }
Example #13
            public Decoder(int sequenceLength, int vocabularySize, int wordVectorSize, int hiddenSize) : base(sequenceLength, sequenceLength * vocabularySize)
            {
                this.embedding      = new Embedding(sequenceLength, vocabularySize, wordVectorSize, (fanIn, fanOut) => 0.01 * Initializers.LeCunNormal(fanIn));
                this.recurrent      = new LSTM(wordVectorSize, hiddenSize, sequenceLength, true, false, (fanIn, fanOut) => Initializers.LeCunNormal(fanIn));
                this.attention      = new Attention(hiddenSize, sequenceLength);
                this.fullyConnected = new FullyConnected(hiddenSize * 2, sequenceLength, sequenceLength * vocabularySize, (fanIn, fanOut) => Initializers.LeCunNormal(fanIn));
                this.weights        = new double[this.embedding.Weights.Length + this.recurrent.Weights.Length + this.fullyConnected.Weights.Length];

                for (int i = 0; i < this.embedding.Weights.Length; i++)
                {
                    this.weights[i] = this.embedding.Weights[i];
                }

                for (int i = 0, j = this.embedding.Weights.Length; i < this.recurrent.Weights.Length; i++, j++)
                {
                    this.weights[j] = this.recurrent.Weights[i];
                }

                for (int i = 0, j = this.embedding.Weights.Length + this.recurrent.Weights.Length; i < this.fullyConnected.Weights.Length; i++, j++)
                {
                    this.weights[j] = this.fullyConnected.Weights[i];
                }
            }
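
The three copy loops above can be collapsed with Array.Copy; a minimal sketch, assuming the same embedding, recurrent and fullyConnected fields as in the constructor:

                // Sketch: concatenate the three weight arrays with Array.Copy
                // instead of copying element by element
                int offset = 0;

                foreach (var source in new[] { this.embedding.Weights, this.recurrent.Weights, this.fullyConnected.Weights })
                {
                    Array.Copy(source, 0, this.weights, offset, source.Length);
                    offset += source.Length;
                }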
Example #14
        static void Main(string[] args)
        {
            //Load array to the tensor
            NDArray x = new NDArray(1, 3);

            x.Load(1, 2, 3);
            x.Print("Load array");

            //Create two layers, one with 6 neurons and another with 1
            FullyConnected fc1 = new FullyConnected(3, 6, "relu");
            FullyConnected fc2 = new FullyConnected(6, 1, "sigmoid");

            //Connect input by passing data from one layer to another
            fc1.Forward(x);
            x = fc1.Output;
            x.Print("FC1 Output");

            fc2.Forward(x);
            x = fc2.Output;
            x.Print("FC2 Output");

            Console.ReadLine();
        }
Example #15
        private Layer ConvertInnerProduct(LayerParameter layerParam)
        {
            var input = _outputs[layerParam.Bottom[0]];
            var param = layerParam.InnerProductParam;

            var weights = LoadBlob(layerParam.Blobs[0]);

            // As in Example #3: 4D inputs with non-unit spatial dimensions are reshaped to 2D first
            if (input.Dimensions.Length == 4 && (input.Dimensions[2] != 1 || input.Dimensions[3] != 1))
            {
                var flatten = new Reshape(input.Dimensions, new[] { -1, input.Dimensions.GetSize() });
                var layer   = new FullyConnected(flatten.Output.Dimensions, weights, null, ActivationFunctionType.Linear);
                flatten.Input.SetConnection(input);
                layer.Input.SetConnection(flatten.Output);
                _outputs[layerParam.Top[0]] = layer.Output;
                return(layer);
            }
            else
            {
                var layer = new FullyConnected(input.Dimensions, weights, null, ActivationFunctionType.Linear);
                layer.Input.SetConnection(input);
                _outputs[layerParam.Top[0]] = layer.Output;
                return(layer);
            }
        }
Example #16
File: PTB.cs Project: vishalbelsare/AleaTK
            public Model(Context ctx, Config cfg, bool isTraining = true, bool usingCuDnn = true)
            {
                Config     = cfg;
                IsTraining = isTraining;
                UsingCuDnn = usingCuDnn;

                Inputs  = Variable <int>(PartialShape.Create(cfg.NumSteps, cfg.BatchSize));
                Targets = Variable <int>(PartialShape.Create(cfg.NumSteps, cfg.BatchSize));

                // embedding
                Embedding = new Embedding <float>(Inputs, cfg.VocabSize, cfg.HiddenSize, initScale: cfg.InitScale);

                // add dropout
                EmbeddedOutput = Embedding.Output;
                if (isTraining && cfg.KeepProb < 1.0)
                {
                    var dropout = new Dropout <float>(EmbeddedOutput, dropoutProb: 1.0 - cfg.KeepProb);
                    EmbeddedOutput = dropout.Output;
                }

                // rnn layer, dropout for intermediate lstm layers and for output
                if (usingCuDnn)
                {
                    RnnAccelerated = new Rnn <float>(new LstmRnnType(forgetBiasInit: 0.0), EmbeddedOutput, cfg.NumLayers, cfg.HiddenSize, isTraining: isTraining, dropout: isTraining && cfg.KeepProb < 1.0 ? 1.0 - Config.KeepProb : 0.0);
                    RnnOutput      = RnnAccelerated.Y;
                    if (isTraining && cfg.KeepProb < 1.0)
                    {
                        var dropout = new Dropout <float>(RnnOutput, dropoutProb: 1.0 - cfg.KeepProb);
                        RnnOutput = dropout.Output;
                    }
                }
                else
                {
                    RnnDirect = new Lstm <float> [cfg.NumLayers];
                    for (var i = 0; i < cfg.NumLayers; ++i)
                    {
                        var lstm = new Lstm <float>(i == 0 ? EmbeddedOutput : RnnOutput, cfg.HiddenSize, forgetBiasInit: 0.0);
                        RnnDirect[i] = lstm;
                        RnnOutput    = lstm.Y;
                        if (isTraining && cfg.KeepProb < 1.0)
                        {
                            var dropout = new Dropout <float>(RnnOutput, dropoutProb: 1.0 - cfg.KeepProb);
                            RnnOutput = dropout.Output;
                        }
                    }
                }

                FC = new FullyConnected <float>(RnnOutput.Reshape(RnnOutput.Shape[0] * RnnOutput.Shape[1], RnnOutput.Shape[2]), cfg.VocabSize);

                Loss = new SoftmaxCrossEntropySparse <float>(FC.Output, Targets.Reshape(Targets.Shape[0] * Targets.Shape[1]));

                Optimizer = new GradientDescentOptimizer(ctx, Loss.Loss, cfg.LearningRate, new GlobalNormGradientClipper(cfg.MaxGradNorm));

                // warmup to force JIT compilation to get timings without JIT overhead
                Optimizer.Initalize();
                ResetStates();
                Optimizer.AssignTensor(Inputs, Fill(Shape.Create(Inputs.Shape.AsArray), 0));
                Optimizer.AssignTensor(Targets, Fill(Shape.Create(Targets.Shape.AsArray), 0));
                Optimizer.Forward();
                if (isTraining)
                {
                    Optimizer.Backward();
                }

                // now reset states
                Optimizer.Initalize();
                ResetStates();
            }
Example #17
File: Network.cs Project: xuan2261/XNet
        public bool CreateLayer(int nCount, ELayerType type, ActivationSettings activationSettings)
        {
            Layer.Utility.Layer layer;
            switch (type)
            {
            case ELayerType.Invalid:
                throw new ArgumentException("Invalid \"type\" argument.");

            case ELayerType.AveragePooling:
                layer = new AveragePooling(nCount, Layers.Count, activationSettings);
                Layers.Add(layer);
                return(true);

            case ELayerType.AverageUnpooling:
                layer = new AverageUnpooling(nCount, Layers.Count, activationSettings);
                Layers.Add(layer);
                return(true);

            case ELayerType.Convolutional:
                layer = new Convolutional(nCount, Layers.Count, activationSettings);
                Layers.Add(layer);
                return(true);

            case ELayerType.Deconvolutional:
                layer = new Deconvolutional(nCount, Layers.Count, activationSettings);
                Layers.Add(layer);
                return(true);

            case ELayerType.Dropout:
                layer = new Dropout(nCount, Layers.Count, activationSettings);
                Layers.Add(layer);
                return(true);

            case ELayerType.FullyConnected:
                layer = new FullyConnected(nCount, Layers.Count, activationSettings);
                Layers.Add(layer);
                return(true);

            case ELayerType.GatedRecurrent:
                layer = new GatedRecurrent(nCount, Layers.Count, activationSettings);
                Layers.Add(layer);
                return(true);

            case ELayerType.LSTM:
                layer = new LSTM(nCount, Layers.Count, activationSettings);
                Layers.Add(layer);
                return(true);

            case ELayerType.MaxPooling:
                layer = new MaxPooling(nCount, Layers.Count, activationSettings);
                Layers.Add(layer);
                return(true);

            case ELayerType.MaxUnpooling:
                layer = new MaxUnpooling(nCount, Layers.Count, activationSettings);
                Layers.Add(layer);
                return(true);

            case ELayerType.Recurrent:
                layer = new Recurrent(nCount, Layers.Count, activationSettings);
                Layers.Add(layer);
                return(true);

            default:
                throw new ArgumentException("Invalid \"type\" argument.");
            }
        }
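
Every case in this switch calls a constructor with the identical (nCount, Layers.Count, activationSettings) shape, so the mapping can also be expressed as a table of factory delegates. A sketch only, assuming the constructors shown above; this is not part of the XNet API:

        // Sketch: table-driven layer construction (requires System and System.Collections.Generic).
        // Assumes each layer type keeps the (int nCount, int index, ActivationSettings settings) constructor.
        private static readonly Dictionary<ELayerType, Func<int, int, ActivationSettings, Layer.Utility.Layer>> Factories =
            new Dictionary<ELayerType, Func<int, int, ActivationSettings, Layer.Utility.Layer>>
        {
            { ELayerType.FullyConnected, (n, i, a) => new FullyConnected(n, i, a) },
            { ELayerType.LSTM, (n, i, a) => new LSTM(n, i, a) },
            // ... one entry per remaining layer type
        };

        public bool CreateLayer(int nCount, ELayerType type, ActivationSettings activationSettings)
        {
            if (!Factories.TryGetValue(type, out var factory))
            {
                throw new ArgumentException("Invalid \"type\" argument.");
            }

            Layers.Add(factory(nCount, Layers.Count, activationSettings));
            return(true);
        }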
Example #18
        static void Main(string[] args)
        {
/*
 *          //
 *          //1. Test tensor and operations
 *          //
 *          Operations K = new Operations();
 *
 *          //Load array to the tensor
 *          NDArray a = new NDArray(3, 6);
 *          a.Load(1, 2, 3, 4, 5, 6, 7, 8, 9, 9, 8, 7, 6, 5, 4, 3, 2, 1);
 *          a.Print("Load array");
 *
 *          //Transpose of the matrix
 *          NDArray t = a.Transpose();
 *          t.Print("Transpose");
 *
 *          //Create a tensor with all value 5
 *          NDArray b = new NDArray(6, 3);
 *          b.Fill(5);
 *          b.Print("Constant 5");
 *
 *          //Create a tensor with all value 3
 *          NDArray c = new NDArray(6, 3);
 *          c.Fill(3);
 *          c.Print("Constant 3");
 *
 *          // Subtract two tensor
 *          b = b - c;
 *
 *          //Perform dot product
 *          NDArray r = K.Dot(a, b);
 *          r.Print("Dot product");
 */
            //
            //2. Test layers and activations
            //

/*
 *          //Load array to the tensor
 *          NDArray x = new NDArray(1, 3);
 *          x.Load(1, 2, 3);
 *          x.Print("Load array");
 *
 *          //Create two layers, one with 6 neurons and another with 1
 *          FullyConnected fc1 = new FullyConnected(3, 6, "relu");
 *          FullyConnected fc2 = new FullyConnected(6, 1, "sigmoid");
 *
 *          //Connect input by passing data from one layer to another
 *          fc1.Forward(x);
 *          x = fc1.Output;
 *          x.Print("FC1 Output");
 *
 *          fc2.Forward(x);
 *          x = fc2.Output;
 *          x.Print("FC2 Output");
 *
 */

            //
            //3. Test cost functions and metrics
            //

            //Load array to the tensor
            NDArray x = new NDArray(3, 3);

            x.Load(2, 4, 6, 1, 3, 5, 2, 3, 5);
            x.Print("Load X values");

            NDArray y = new NDArray(3, 1);

            y.Load(20, 15, 15);
            y.Print("Load Y values");

            //Create two layers, one with 6 neurons and another with 1
            FullyConnected fc1 = new FullyConnected(3, 6, "relu");
            FullyConnected fc2 = new FullyConnected(6, 1, "relu");

            //Connect input by passing data from one layer to the other
            fc1.Forward(x);
            fc2.Forward(fc1.Output);
            var preds = fc2.Output;

            preds.Print("Predictions");

            //Calculate the binary cross-entropy cost between the predicted and expected values
            BaseCost cost       = new BinaryCrossEntropy();
            var      costValues = cost.Forward(preds, y);

            costValues.Print("BCE Cost");

            //Calculate the binary accuracy metric for the predicted vs expected values
            BaseMetric metric       = new BinaryAccuracy();
            var        metricValues = metric.Calculate(preds, y);

            metricValues.Print("Acc Metric");

            //Backpropagation starts here
            //Calculate the gradient of the cost function
            var grad = cost.Backward(preds, y);

            //Backpropagate through the fc2 layer by passing the cost gradient into its Backward function
            fc2.Backward(grad);
            //The gradient with respect to fc2's input is stored in its InputGrad property; pass it on to fc1
            fc1.Backward(fc2.InputGrad);

            //Print parameters for both layers along with the Grad
            fc1.PrintParams();
            fc2.PrintParams();
        }
Example #19
        static void Main(string[] args)
        {
            Operations K = new Operations();

            //Load array to the tensor
            NDArray x = new NDArray(3, 3);

            x.Load(2, 4, 6, 1, 3, 5, 2, 3, 5);
            x.Print("Load X Values");

            NDArray y = new NDArray(3, 1);

            y.Load(1, 0, 1);
            y.Print("Load Y Values");

            //Create two layers, one with 6 neurons and another with 1
            FullyConnected fc1 = new FullyConnected(3, 6, "relu");
            FullyConnected fc2 = new FullyConnected(6, 1, "sigmoid");

            //Connect input by passing data from one layer to another
            fc1.Forward(x);
            fc2.Forward(fc1.Output);
            var preds = fc2.Output;

            preds.Print("Predictions");

            //Calculate the binary cross-entropy cost between the predicted and expected values
            BaseCost cost       = new BinaryCrossEntropy();
            var      costValues = cost.Forward(preds, y);

            costValues.Print("BCE Cost");

            //Calculate the binary accuracy metric for the predicted vs expected values
            BaseMetric metric       = new BinaryAccuacy();
            var        metricValues = metric.Calculate(preds, y);

            metricValues.Print("Acc Metric");

            var grad = cost.Backward(preds, y);

            fc2.Backward(grad);
            fc1.Backward(fc2.InputGrad);

            Console.WriteLine("Param value for FC1 before ADAM optimization");
            fc1.PrintParams(printGrads: false);

            //Initialise ADAM optimizer with default learning rate of 0.01
            BaseOptimizer optimizer = BaseOptimizer.Get("adam");

            //Change the learning rate to see the jump in weight changes.
            //optimizer.LearningRate = 0.1;

            //Apply the optimizer to the first layer for the first iteration
            optimizer.Update(1, fc1);
            Console.WriteLine("Param value for FC1 after ADAM optimization");
            fc1.PrintParams(printGrads: false);

            Console.WriteLine("Param value for FC2 before ADAM optimization");
            fc2.PrintParams(printGrads: false);

            //Apply the optimizer to the second layer for the first iteration
            optimizer.Update(1, fc2);
            Console.WriteLine("Param value for FC2 after ADAM optimization");
            fc2.PrintParams(printGrads: false);

            Console.ReadLine();
        }
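
Examples #10 and #19 together contain all the pieces of a single training step. A hypothetical loop that repeats them, sketched against the same API (fc1, fc2, cost, optimizer, x and y as set up above; the iteration count is arbitrary):

            // Sketch: full training loop of forward pass, cost, backpropagation and ADAM update
            for (int iteration = 1; iteration <= 100; iteration++)
            {
                fc1.Forward(x);
                fc2.Forward(fc1.Output);
                var preds = fc2.Output;

                var costValues = cost.Forward(preds, y);
                var grad = cost.Backward(preds, y);

                fc2.Backward(grad);
                fc1.Backward(fc2.InputGrad);

                optimizer.Update(iteration, fc1);
                optimizer.Update(iteration, fc2);
            }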