Example #1
0
    // Start is called before the first frame update (Unity coroutine).
    // Fits a 1-in/1-out linear layer to noisy data y = 12x + 15 + U[0,10)
    // with SGD, yielding once per epoch so training is spread across frames.
    IEnumerator Start()
    {
        var r = new System.Random(2); // fixed seed => reproducible noise

        var x = (Matrix) new double[1000, 1];

        Matrix.MatrixLoop((i, j) =>
        {
            x[i, 0] = i;
        }, x.X, x.Y);

        var y = (Matrix) new double[1000, 1];

        // BUG FIX: this loop previously iterated over x's dimensions
        // (x.X, x.Y); use y's own dimensions when filling y.
        Matrix.MatrixLoop((i, j) =>
        {
            y[i, 0] = i * 12 + 15 + r.Next(10);
        }, y.X, y.Y);

        // var x = new double[,] { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } };
        // var y = new double[,] { { 0 }, { 1 }, { 0 }, { 1 } };

        var X = new Tensor(x, true);
        var Y = new Tensor(y, true);

        var seq = new Sequential();

        seq.Layers.Add(new Linear(1, 1, r));

        var sgd = new StochasticGradientDescent(seq.Parameters, 0.001);

        var mse = new MeanSquaredError();

        for (var i = 0; i < 10000; i++)
        {
            // Yield to Unity so one epoch runs per frame.
            yield return(null);

            var pred = seq.Forward(X);
            print(pred.Data.Size); // debug: prediction tensor size
            var loss = mse.Forward(pred, Y);

            loss.Backward();
            sgd.Step();
            print($"Epoch: {i} Loss: {loss.Data[0, 0]}");
            print(Y);
            print(pred);
        }

        // Final prediction on the raw inputs (no gradient tracking).
        print(seq.Forward(new Tensor(x)));
    }
Example #2
0
    // XOR-style toy problem: two stacked linear layers trained with SGD
    // on a hand-written squared-error loss (no loss-module helper).
    static void ThirdNN()
    {
        var rng = new Random();

        var data = new Tensor((Matrix) new double[, ] {
            { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
        }, true);
        var target = new Tensor((Matrix) new double[, ] {
            { 0 }, { 1 }, { 0 }, { 1 }
        }, true);

        // 2 -> 3 -> 1 fully-connected stack.
        var network = new Sequential();
        network.Layers.Add(new Linear(2, 3, rng));
        network.Layers.Add(new Linear(3, 1, rng));

        var optimizer = new StochasticGradientDescent(network.Parameters, 0.1f);

        for (var epoch = 0; epoch < 10; epoch++)
        {
            var prediction = network.Forward(data);

            // loss = sum((pred - target)^2) reduced along the vertical axis.
            var residual = prediction.Sub(target);
            var loss = residual.Mul(residual).Sum(AxisZero.vertical);

            // Seed the backward pass with a ones tensor of the loss shape.
            loss.Backward(new Tensor(Matrix.Ones(loss.Data.X, loss.Data.Y)));
            optimizer.Step();

            Console.WriteLine($"Epoch: {epoch} Loss: {loss}");
        }
    }
Example #3
0
    // Same toy problem as ThirdNN, but the loss is computed through the
    // MeanSquaredError module instead of being written out by hand.
    static void FourthNN()
    {
        var rng = new Random();

        var data = new Tensor((Matrix) new double[, ] {
            { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
        }, true);
        var target = new Tensor((Matrix) new double[, ] {
            { 0 }, { 1 }, { 0 }, { 1 }
        }, true);

        // 2 -> 3 -> 1 fully-connected stack.
        var network = new Sequential();
        network.Layers.Add(new Linear(2, 3, rng));
        network.Layers.Add(new Linear(3, 1, rng));

        var optimizer = new StochasticGradientDescent(network.Parameters, 0.1f);
        var criterion = new MeanSquaredError();

        for (var epoch = 0; epoch < 10; epoch++)
        {
            var prediction = network.Forward(data);
            var loss = criterion.Forward(prediction, target);

            // Seed the backward pass with a ones tensor of the loss shape.
            loss.Backward(new Tensor(Matrix.Ones(loss.Data.X, loss.Data.Y)));
            optimizer.Step();

            Console.WriteLine($"Epoch: {epoch} Loss: {loss}");
        }
    }
            // Forward pass of a memory-augmented network: the hidden layers see
            // the current input concatenated with the flattened memory stack, and
            // the memory stack is then shifted down one row with [input|output]
            // written into row 0.
            //
            // BUG FIX: the row-shift previously relied on catching
            // IndexOutOfRangeException to terminate copying (exceptions as
            // control flow); it now computes explicit bounds instead, which is
            // both correct and far cheaper.
            public float[] Forward(float[] x)
            {
                flat_mem_stack = Utils.FlattenArray(mem_stack);

                float[] inputPlusMemory = Utils.ConcatArray(x, flat_mem_stack);

                float[] output = h_layers.Forward(inputPlusMemory);

                // Next memory stack: row i of the old stack moves to row i + 1;
                // the oldest row (n_memory - 1) falls off the end.
                float[,] mem2 = new float[n_memory, n_outputs + x.Length];

                // Copy only what fits: the old stack may be smaller than the new
                // one (e.g. when the input length changes between calls).
                int copyRows = System.Math.Min(n_memory - 1, mem_stack.GetLength(0));
                int copyCols = System.Math.Min(n_outputs + x.Length, mem_stack.GetLength(1));

                for (int i = 0; i < copyRows; i++)
                {
                    for (int j = 0; j < copyCols; j++)
                    {
                        mem2[i + 1, j] = mem_stack[i, j];
                    }
                }

                // Row 0 holds the newest [input | output] pair.
                // NOTE(review): assumes outputPlusInput.Length <= n_outputs + x.Length,
                // i.e. h_layers produces exactly n_outputs values — confirm.
                float[] outputPlusInput = Utils.ConcatArray(x, output);
                for (int i = 0; i < outputPlusInput.Length; i++)
                {
                    mem2[0, i] = outputPlusInput[i];
                }

                mem_stack = mem2;

                return(output);
            }
Example #5
0
            // Feature extractor -> average pool -> flatten to (batch, 1024) -> classifier.
            // Intermediate tensors are disposed as soon as the next stage has consumed them.
            public override TorchTensor Forward(TorchTensor input)
            {
                using (var featureMap = features.Forward(input))
                using (var pooled = avgPool.Forward(featureMap))
                using (var flattened = pooled.View(new long[] { pooled.Shape[0], 256 * 2 * 2 }))
                {
                    return classifier.Forward(flattened);
                }
            }
Example #6
0
        // Builds a 2-16-1 sigmoid MLP, fills a 4x2 input tensor and a 4x1
        // target tensor with the XOR truth table via raw host pointers, then
        // runs a single forward pass and prints the result.
        // NOTE(review): there is no training loop here — only the untrained
        // network's initial forward output is printed.
        public unsafe static void XORExampleNew()
        {
            //Hyperparameters
            Hyperparameters.LearningRate = 0.1f;
            Hyperparameters.Optimizer    = new SGD();

            //Model Creation
            //var l1 = new DenseModule(2, 16, "sigmoid");
            //var model = new DenseModule(16, 1, "sigmoid", l1);

            var model = new Sequential(
                new DenseModule(2, 16, "sigmoid"),
                new DenseModule(16, 1, "sigmoid")
                );


            //Data preparation: 4 samples of 2 features, 4 scalar targets,
            //allocated as host float tensors and written through raw pointers.
            Tensor x_train = new Tensor((4, 2), DeviceConfig.Host_Float);
            Tensor y_train = new Tensor((4, 1), DeviceConfig.Host_Float);

            float *xt = (float *)x_train.Array;
            float *yt = (float *)y_train.Array;

            // XOR truth table (row-major: xt[2*row], xt[2*row + 1]):
            // 1,1 = 0
            // 1,0 = 1
            // 0,1 = 1
            // 0,0 = 0

            xt[0] = 1; xt[1] = 1;
            xt[2] = 1; xt[3] = 0;
            xt[4] = 0; xt[5] = 1;
            xt[6] = 0; xt[7] = 0;

            yt[0] = 0;
            yt[1] = 1;
            yt[2] = 1;
            yt[3] = 0;

            //Give data to the model (wrapped as terms of the computation graph)
            Terms.Term[] data  = new Terms.Term[] { new Terms.Variable(x_train) };
            Terms.Term[] label = new Terms.Term[] { new Terms.Variable(y_train) };

            //Forward pass; label is prepared above but unused in this example.
            Terms.Term[] res = model.Forward(data);
            WriteLine(res[0].GetResult());
        }
Example #7
0
        // Evaluate the model on the test set. Prints the total number of test
        // samples (over all complete batches) that are classified correctly.
        private static void EvaluateModel(Sequential model, DataSet testingSet, int numInputs)
        {
            float totalCorrect = 0;
            var sampleCount = testingSet.inputs.Shape[0];

            // Walk the test set in complete batches; a trailing partial batch is skipped.
            for (int offset = 0; offset + BatchSize <= sampleCount; offset += BatchSize)
            {
                using (var batchInputs = testingSet.inputs.Narrow(0, offset, BatchSize))
                using (var batchTargets = testingSet.targetValues.Narrow(0, offset, BatchSize))
                {
                    var output = model.Forward(batchInputs, ModelMode.Evaluate);

                    // A sample is correct when the argmax over classes matches its target.
                    totalCorrect += (output.TVar().Argmax(1) == batchTargets)
                                    .SumAll()
                                    .ToScalar()
                                    .Evaluate();
                }
            }

            Console.WriteLine("Test set total correct: " + totalCorrect + " / " + testingSet.inputs.Shape[0]);
        }
Example #8
0
        // Runs a single epoch of training: iterates the training set in complete
        // batches, and for each batch builds a gradient closure that the
        // optimizer invokes to get (output, parameter gradients).
        private static void TrainEpoch(Sequential model, ICriterion criterion, SgdOptimizer optim, DataSet trainingSet, int numInputs, bool useTargetClasses)
        {
            using (new SimpleTimer("Training epoch completed in {0}ms"))
            {
                for (int batchStart = 0; batchStart <= trainingSet.inputs.Shape[0] - BatchSize; batchStart += BatchSize)
                {
                    // Progress marker, one dot per batch.
                    Console.Write(".");

                    // The closure captures batchStart; it is safe because
                    // optim.Update invokes it within this same iteration.
                    var grad = new GradFunc(parameters =>
                    {
                        using (var mbInputs = trainingSet.inputs.Narrow(0, batchStart, BatchSize))
                            using (var mbTargets = trainingSet.targets.Narrow(0, batchStart, BatchSize))
                                using (var mbTargetClasses = trainingSet.targetValues.Narrow(0, batchStart, BatchSize))
                                {
                                    // Zero accumulated gradients before the new backward pass.
                                    foreach (var gradNDArray in model.GetGradParameters())
                                    {
                                        Ops.Fill(gradNDArray, 0);
                                    }

                                    // useTargetClasses selects class-index targets
                                    // vs. dense target tensors for the criterion.
                                    var modelOutput     = model.Forward(mbInputs, ModelMode.Train);
                                    var criterionOutput = criterion.UpdateOutput(modelOutput, useTargetClasses ? mbTargetClasses : mbTargets);


                                    var criterionGradIn = criterion.UpdateGradInput(modelOutput, useTargetClasses ? mbTargetClasses : mbTargets);
                                    model.Backward(mbInputs, criterionGradIn, ModelMode.Train);

                                    return(new OutputAndGrads()
                                    {
                                        output = modelOutput, grads = model.GetGradParameters().ToArray()
                                    });
                                }
                    });

                    optim.Update(grad, model.GetParameters().ToArray());
                }
            }
            Console.WriteLine();
        }
Example #9
0
        // Trains a small Tanh network with the AdaDelta optimizer on the
        // 3-bit parity function (output = a XOR b XOR c) and asserts that
        // the network classifies at least 80% of the repeated checks correctly.
        public void AdaDeltaTest()
        {
            var net = new Sequential(new QuadraticCost(), new AdaDelta(0.03));

            net.CreateLayer(new Dense(3, new Tanh()));
            net.CreateLayer(new Dense(3, new Tanh()));
            net.CreateLayer(new Dense(3, new Tanh()));
            net.CreateLayer(new Output(1, new Tanh()));

            net.InitNetwork();

            var inputs  = new List <Matrix>();
            var outputs = new List <Matrix>();

            // 0 0 0    => 0
            inputs.Add(new Matrix(new[, ] {
                { 0.0 }, { 0.0 }, { 0.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 0.0 }
            }));

            // 0 0 1    => 1
            inputs.Add(new Matrix(new[, ] {
                { 0.0 }, { 0.0 }, { 1.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 1.0 }
            }));

            // 0 1 0    => 1
            inputs.Add(new Matrix(new[, ] {
                { 0.0 }, { 1.0 }, { 0.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 1.0 }
            }));

            // 0 1 1    => 0
            inputs.Add(new Matrix(new[, ] {
                { 0.0 }, { 1.0 }, { 1.0 }
            }));
            // BUG FIX: the target for 0 1 1 was { 1.0 }, contradicting both the
            // parity comment above and the assertion below that expects ~0.
            outputs.Add(new Matrix(new[, ] {
                { 0.0 }
            }));

            // 1 0 0    => 1
            inputs.Add(new Matrix(new[, ] {
                { 1.0 }, { 0.0 }, { 0.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 1.0 }
            }));

            // 1 0 1    => 0
            inputs.Add(new Matrix(new[, ] {
                { 1.0 }, { 0.0 }, { 1.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 0.0 }
            }));

            // 1 1 0    => 0
            inputs.Add(new Matrix(new[, ] {
                { 1.0 }, { 1.0 }, { 0.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 0.0 }
            }));

            // 1 1 1    => 1
            inputs.Add(new Matrix(new[, ] {
                { 1.0 }, { 1.0 }, { 1.0 }
            }));
            outputs.Add(new Matrix(new[, ] {
                { 1.0 }
            }));

            // One pass over all eight samples.
            for (var i = 0; i < 8; i++)
            {
                net.Train(inputs[i % 8], outputs[i % 8]);
            }

            var correct = 0;

            // NOTE(review): the "expected 1" checks use |out| - 1 < 0.1, which is
            // satisfied by any |out| < 1.1 — far more lenient than the "expected 0"
            // checks (|out| < 0.1). Left as-is to preserve the test's threshold.
            for (var i = 0; i < 10; i++)
            {
                correct += Math.Abs(net.Forward(inputs[0])[0, 0]) < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[1])[0, 0]) - 1 < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[2])[0, 0]) - 1 < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[3])[0, 0]) < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[4])[0, 0]) - 1 < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[5])[0, 0]) < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[6])[0, 0]) < 0.1 ? 1 : 0;
                correct += Math.Abs(net.Forward(inputs[7])[0, 0]) - 1 < 0.1 ? 1 : 0;
            }
            var acc = correct / 80.0 * 100.0;

            Trace.WriteLine(" Acc: " + acc);
            Assert.IsTrue(acc > 80.0, "Network did not learn XOR");
        }
Example #10
0
    // Ranks the recorded sample lists by size (descending), packs the
    // `getFirst` largest ones into a single regression batch of 4-feature
    // inputs and scalar outputs, trains for `fittingEpoch` epochs, then
    // clears the recording buffers.
    private void Train()
    {
        var array = new int[totalInputList.Count];

        for (int i = 0; i < totalInputList.Count; i++)
        {
            array[i] = i;
        }

        // Sort indices so array[0] refers to the list with the most samples.
        System.Array.Sort <int>(array, new System.Comparison <int>(
                                    (i1, i2) => totalInputList[i2].Count.CompareTo(totalInputList[i1].Count)));

        var newArray = new int[getFirst];
        var regCount = 0;

        for (int i = 0; i < getFirst; i++)
        {
            // BUG FIX: previously newArray[i] = i, which discarded the sort
            // above and sized regCount from the unsorted lists. Take the
            // sorted indices so the largest lists are actually used.
            newArray[i] = array[i];
            // NOTE(review): assumes input and output lists have equal counts
            // per index — confirm where the lists are recorded.
            regCount   += totalOutputList[newArray[i]].Count;
        }

        var MatrixX = new double[regCount, 4];
        var MatrixY = new double[regCount, 1];

        var x = 0;

        for (int i = 0; i < newArray.Length; i++)
        {
            // BUG FIX: loop bound previously read totalInputList[i].Count
            // (unsorted index) while the body indexed via newArray[i].
            for (int j = 0; j < totalInputList[newArray[i]].Count; j++)
            {
                MatrixX[x, 0] = totalInputList[newArray[i]][j][0, 0];
                MatrixX[x, 1] = totalInputList[newArray[i]][j][0, 1];
                MatrixX[x, 2] = totalInputList[newArray[i]][j][0, 2];
                MatrixX[x, 3] = totalInputList[newArray[i]][j][0, 3];

                MatrixY[x, 0] = totalOutputList[newArray[i]][j][0, 0];

                x++;
            }
        }

        var X = new Tensor(MatrixX, true);
        var Y = new Tensor(MatrixY, true);

        for (var i = 0; i < fittingEpoch; i++)
        {
            var pred = seq.Forward(X);
            var loss = mse.Forward(pred, Y);
            loss.Backward();
            sgd.Step();

            // Abort visibly if training diverges.
            if (double.IsNaN(loss.Data[0, 0]))
            {
                Debug.LogError("LOSS IS NAN");
                Debug.Break();
            }

            print("Epoch: " + i + " Loss: " + loss.Data[0, 0]);
        }

        totalInputList.Clear();
        totalOutputList.Clear();
    }
Example #11
0
        // Loads one MNIST CSV (header row, then "label,pixel0..pixel783" rows)
        // into parallel lists: a 10x1 one-hot label vector and a 784x1 pixel
        // vector per sample. Invariant culture is used because the CSV is
        // machine-generated data, not locale-formatted text (CA1305).
        private static void LoadMnistCsv(string path, List <Matrix> labels, List <Matrix> data)
        {
            var lines = File.ReadAllLines(path);

            for (var j = 1; j < lines.Length; j++)
            {
                var fields = lines[j].Split(',');

                var label = new Matrix(10, 1).Fill(0);
                label[int.Parse(fields[0], System.Globalization.CultureInfo.InvariantCulture), 0] = 1.0;
                labels.Add(label);

                var pixels = new Matrix(784, 1);
                for (var i = 1; i < fields.Length; i++)
                {
                    pixels[i - 1, 0] = double.Parse(fields[i], System.Globalization.CultureInfo.InvariantCulture);
                }
                data.Add(pixels);
            }
        }

        // Trains a 784-128-10 ReLU/Softmax network on 1% of the MNIST training
        // set and reports argmax accuracy over 1% of the test set.
        public void DenseMnist()
        {
            // Load Train Data
            var mnistLables = new List <Matrix>();
            var mnistData   = new List <Matrix>();
            LoadMnistCsv("..\\..\\..\\..\\..\\datasets\\mnist_train.csv", mnistLables, mnistData);

            // Load Test Data
            var mnistTestLables = new List <Matrix>();
            var mnistTestData   = new List <Matrix>();
            LoadMnistCsv("..\\..\\..\\..\\..\\datasets\\mnist_test.csv", mnistTestLables, mnistTestData);

            // Create Network
            var net = new Sequential(new CategoricalCrossEntropy(), new Adam(0.3), null, 128);

            net.CreateLayer(new Dense(784, new Relu()));
            net.CreateLayer(new Dense(128, new Relu()));
            net.CreateLayer(new Output(10, new Softmax()));
            net.InitNetwork();

            // Train Network on the first 1% of the training samples.
            for (var i = 0; i < mnistData.Count / 100; i++)
            {
                net.Train(mnistData[i], mnistLables[i]);
            }

            // Test Network: a sample is correct when the argmax of the
            // prediction matches the hot index of the one-hot label.
            // (Previously this loop computed nothing and no accuracy was printed.)
            var correct = 0;
            var total   = mnistTestData.Count / 100;

            for (var i = 0; i < total; i++)
            {
                var predicted = net.Forward(mnistTestData[i]);
                var expected  = mnistTestLables[i];

                var predIdx = 0;
                var trueIdx = 0;
                for (var k = 1; k < 10; k++)
                {
                    if (predicted[k, 0] > predicted[predIdx, 0])
                    {
                        predIdx = k;
                    }
                    if (expected[k, 0] > expected[trueIdx, 0])
                    {
                        trueIdx = k;
                    }
                }

                if (predIdx == trueIdx)
                {
                    correct++;
                }
            }

            var acc = total == 0 ? 0.0 : correct * 100.0 / total;

            Trace.WriteLine(" Metrics Accuracy: " + acc);
            //Assert.IsTrue(acc > 80.0, "Network did not learn MNIST");
        }