Example #1
        public virtual unsafe void Minimize()
        {
            DeleteTerms();

            PreCheck();

            // Materialize a term for every index in the outer shape.
            Index a = new Index(OuterShape);

            a.SetZero();

            for (int i = 0; i < OuterShape.TotalSize; i++, a.Increase(1))
            {
                GetTerm(a);
            }

            if (Terms.Count > 1)
            {
                // Combine all collected terms into a single Add term and minimize that.
                Terms.Add min = new Terms.Add(Terms.ToArray());
                min.Minimize();
                min.DeleteResults();
            }
            else if (Terms.Count == 1)
            {
                Terms[0].Minimize();
            }


            DeleteTerms();
        }
Example #2
        public static unsafe void bb6()
        {
            Variable v = new Variable(new Dimension[] { 6 }, new Shape(3));

            // Recurrence over the 6 outer steps: odd steps add this layer's previous
            // output to the input term; even steps pass the input term through.
            DynamicRecurrent r = new DynamicRecurrent(v.OuterDimensions, v.InnerDimensions, new[] { v },
                                                      (Layer me, List <Layer> x, Index t) =>
            {
                if (t[0] % 2 == 1)
                {
                    return(new Terms.Add(x[0].GetTerm(t), me.GetTerm(t - 1)));
                }
                else
                {
                    return(x[0].GetTerm(t));
                }
            });

            r.PreCheck();


            Index a = new Index(r.OuterShape);

            a.SetZero();

            for (int i = 0; i < r.OuterShape.TotalSize; i++, a.Increase(1))
            {
                Console.WriteLine(r.GetTerm(a).GetResult());
            }
        }
Example #3
        public unsafe void MultiplyTerm()
        {
            Input  x    = new Input(4, 2, 1);
            Tensor data = new Tensor((10, 3, 4), DeviceConfig.Host_Float);

            for (int i = 0; i < data.Shape.TotalSize; i++)
            {
                ((float *)data.Array)[i] = i / 12; // note: integer division, so values step up every 12 elements
            }
            x.SetInput(data);

            Index a = new Index(x.OuterShape);

            a.SetZero();


            for (int i = 0; i < x.OuterShape.TotalSize; i++, a.Increase(1))
            {
                Term t = x.GetTerm(a);
                Console.WriteLine("Input " + i + ": " + t.GetResult());
                Term mul = new Multiply(t, t);
                Console.WriteLine("Product " + i + ": " + mul.GetResult());
                mul.DeleteResults();
                //todo check the result.
            }
        }
Example #4
        public static unsafe void bb5()
        {
            Variable v = new Variable(new[] { new Dimension(3) }, new Shape(1000));

            v.PreCheck();

            Index a = new Index(v.OuterShape);

            a.SetZero();

            Terms.ReLU r = new Terms.ReLU(v.GetTerm(a));

            //Console.WriteLine(v.GetTerm(a).GetResult());
            //Console.WriteLine(r.GetResult());

            r.DeleteResults();

            ((Terms.Variable)v.GetTerm(a)).Clean();
        }
Example #5
        public unsafe override void CalculateDerivate(Tensor s)
        {
            if (Terms[0].ContainsTrainable) //todo kernelize this!
            {
                Tensor combined = new Tensor(Terms[0].Shape.Clone(), DeviceConfig.Host_Float);


                float *ptrcombined = (float *)combined.Array;
                float *ptrs        = (float *)s.Array;

                Index iterator = new Index(this.Terms[0].Shape);

                iterator.SetZero();

                for (int h = 0; h < this.Terms[0].Shape.TotalSize; h++)
                {
                    int indexs = 0;

                    // Advance the multi-index with carry, then map it into s: each index
                    // is divided by Divisor, so every Divisor-sized block of the term
                    // reads the same upstream-gradient element (see the commented loop below).
                    for (int i = iterator.N - 1; i >= 0; i--)
                    {
                        if (iterator.Indices[i] == this.Terms[0].Shape[i])
                        {
                            iterator.Indices[i] = 0;
                            iterator.Indices[i - 1]++;
                        }
                        indexs += (iterator.Indices[i] / Divisor[i]) * this.Shape.Multiplied[i + 1];
                    }

                    ptrcombined[h] = ptrs[indexs];
                    iterator.Indices[iterator.N - 1]++;
                }

                //for (int i1 = 0; i1 < s.D1; i1++)
                //    for (int i2 = 0; i2 < s.D2; i2++)
                //        for (int i3 = 0; i3 < v1d1; i3++)
                //            for (int i4 = 0; i4 < v1d2; i4++)
                //                combined[i1, i2, i3, i4] += s[i1, i2, i3 / RowDivider, i4 / ColumnDivider];// * (m[i3 / RowDivider, i4 / ColumnDivider, i3, i4] = 1);

                Terms[0].Derivate(combined);
                combined.Dispose();
            }
        }
Example #6
        public static unsafe void ExpandFloat32_GetGradient_0(Tensor res, Tensor s, Shape thisShape, Shape term0, Shape Multiplier)
        {
            res.SetValue(0);

            float *ptrcombined = (float *)res.Base.Array;
            float *ptrs        = (float *)s.Base.Array;

            // Fast path: the term is tiled Multiplier[0] times along the leading axis,
            // so its gradient is simply the element-wise sum of those slices of s.
            if (Multiplier.N == 2 && Multiplier[1] == 1)
            {
                for (int i = 0; i < Multiplier[0]; i++)
                {
                    float *me = ((float *)s.Base.Array) + i * term0.TotalSize;
                    VectorizationFloat.ElementWiseAddAVX((float *)res.Base.Array, me, (float *)res.Base.Array, term0.TotalSize);
                }
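                // Illustrative scalar equivalent of the vectorized loop above (this assumes
                // ElementWiseAddAVX computes res = res + me element-wise over the given length):
                //for (int i = 0; i < Multiplier[0]; i++)
                //    for (int j = 0; j < term0.TotalSize; j++)
                //        ptrcombined[j] += ptrs[i * term0.TotalSize + j];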
            }
            else
            {
                // General path: scatter-add each element of s into the gradient slot
                // obtained by dividing its multi-index by Multiplier.
                Index iterator = new Index(thisShape);

                iterator.SetZero();

                for (int h = 0; h < thisShape.TotalSize; h++)
                {
                    long indexs = 0;

                    for (int i = iterator.N - 1; i >= 0; i--)
                    {
                        if (iterator.Indices[i] == thisShape[i])
                        {
                            iterator.Indices[i] = 0;
                            iterator.Indices[i - 1]++;
                        }
                        indexs += (iterator.Indices[i] / Multiplier[i]) * term0.Multiplied[i + 1];
                    }

                    ptrcombined[indexs] += ptrs[h];
                    iterator.Indices[iterator.N - 1]++;
                }
            }
        }
Example #7
        public static unsafe void bb4()
        {
            Input x = new Input(4, 2, 1);

            var sum = new ShiftTime(x, new Dimension[] { -1 });
            sum.Name = "sum";


            Stopwatch c = new Stopwatch();

            c.Start();

            for (int i2 = 0; i2 < 1; i2++)
            {
                Tensor data = new Tensor((10, 3, 4), DeviceConfig.Host_Float);

                for (int i = 0; i < data.Shape.TotalSize; i++)
                {
                    ((float *)data.Array)[i] = i / 12; // note: integer division, so values step up every 12 elements
                }
                x.SetInput(data);

                sum.PreCheck();

                Index a = new Index(x.OuterShape);
                a.SetZero();

                for (int i = 0; i < x.OuterShape.TotalSize; i++, a.Increase(1))
                {
                    Console.WriteLine("Term " + i + ":" + sum.GetTerm(a).GetResult());
                }

                c.Restart();
                sum.Minimize();
                c.Stop();
                data.Dispose();
                Console.WriteLine($"{i2} took {c.ElapsedMilliseconds}ms");
            }
        }
Example #8
        public unsafe static void XORExample()
        {
            //Hyperparameters
            Hyperparameters.LearningRate = 0.1f;
            Hyperparameters.Optimizer    = new SGD();

            //Model Creation
            var l1 = LayerBuilder.Dense(16, "sigmoid");
            var l2 = LayerBuilder.Dense(1, "sigmoid")[l1];


            var   x     = new Input(2);
            Layer model = l2[x];

            //Loss Function Creation
            var y    = new Input(1);
            var loss = LayerBuilder.SquaredError(model, y);


            //Data preparation
            Tensor x_train = new Tensor((1, 4, 2), DeviceConfig.Host_Float);
            Tensor y_train = new Tensor((1, 4, 1), DeviceConfig.Host_Float);

            float *xt = (float *)x_train.Array;
            float *yt = (float *)y_train.Array;

            // 1,1 = 0
            // 1,0 = 1
            // 0,1 = 1
            // 0,0 = 0

            xt[0] = 1; xt[1] = 1;
            xt[2] = 1; xt[3] = 0;
            xt[4] = 0; xt[5] = 1;
            xt[6] = 0; xt[7] = 0;

            yt[0] = 0;
            yt[1] = 1;
            yt[2] = 1;
            yt[3] = 0;

            //Give data to the model
            x.SetInput(x_train);
            y.SetInput(y_train);

            Stopwatch s = new Stopwatch();

            s.Start();
            //Minimizing
            loss.PreCheck();
            Index a = new Index(model.OuterShape);

            a.SetZero();

            for (int epoch = 0; epoch < 100000; epoch++)
            {
                loss.Minimize();
                if (epoch % 5000 == 0)
                {
                    float *lossPtr = (float *)loss.GetTerm(a).GetResult().Array;
                    float  res     = lossPtr[0] + lossPtr[1] + lossPtr[2] + lossPtr[3];
                    Console.WriteLine(res);
                }
            }
            s.Stop();
            Console.WriteLine("Time Elapsed: " + s.ElapsedMilliseconds);

            //Print Pools
            PrintPools();

            //Print the results

            var result = model.GetTerm(a).GetResult();

            Console.WriteLine("Results: " + result);


            //Print the results of clone model
            Input x2 = new Input(2);

            x2.SetInput(x_train);
            var clonemodel = l2[x2];

            clonemodel.PreCheck();
            var result2 = clonemodel.GetTerm(a).GetResult();

            Console.WriteLine("Results: " + result2);

            clonemodel.DeleteTerms();
            model.DeleteTerms();
        }
Example #9
        public unsafe static void MNISTExample()
        {
            //Hyperparameters
            Hyperparameters.LearningRate = 0.001f;
            Hyperparameters.Optimizer    = new SGD();


            //Model Creation
            var x = new Input(784);
            //var dropout = new Dropout(x, 0.1f);
            //var model = LayerBuilder.Dense(500, x, "relu");
            var model = LayerBuilder.Dense(100, x, "relu");

            model = LayerBuilder.Dense(400, model, "relu");
            model = LayerBuilder.Dense(200, model, "relu");
            model = LayerBuilder.Dense(100, model, "relu");
            model = LayerBuilder.Dense(10, model, "softmax");


            //Loss Function Creation
            var y    = new Input(10);
            var loss = LayerBuilder.SquaredError(model, y);


            //Data preparation
            (float[,] traindata, float[,] labels) = LoadMNISTDataSet();
            int mnistsize = 42000;

            Tensor x_train = Tensor.LoadArrayToDisposedTensor(traindata, new Shape(mnistsize, 784), DeviceConfig.Host_Float);
            Tensor y_train = Tensor.LoadArrayToDisposedTensor(labels, new Shape(mnistsize, 10), DeviceConfig.Host_Float);

            //Training
            int batchsize = 100;
            int trainl    = 41000;

            Stopwatch s = new Stopwatch();

            for (int epoch = 0; epoch < 35; epoch++)
            {
                float l   = 0;
                float val = 0;

                s.Restart();
                Console.WriteLine("Epoch " + epoch + " başladı.");
                for (int batch = 0; batch < trainl / batchsize; batch++)
                {
                    Tensor batchx = Tensor.Cut(x_train, batch * (batchsize * 784), new Shape(1, batchsize, 784));
                    Tensor batchy = Tensor.Cut(y_train, batch * (batchsize * 10), new Shape(1, batchsize, 10));

                    x.SetInput(batchx);
                    y.SetInput(batchy);

                    loss.Minimize();

                    Index zero = new Index(loss.OuterShape);
                    zero.SetZero();

                    Tensor res = loss.GetTerm(zero).GetResult();
                    float *pp  = (float *)res.Array;

                    for (int i = 0; i < res.Shape.TotalSize; i++)
                    {
                        l += pp[i];
                    }
                }

                for (int batch = trainl / batchsize; batch < mnistsize / batchsize; batch++)
                {
                    Tensor batchx = Tensor.Cut(x_train, batch * (batchsize * 784), new Shape(1, batchsize, 784));
                    Tensor batchy = Tensor.Cut(y_train, batch * (batchsize * 10), new Shape(1, batchsize, 10));

                    model.DeleteTerms();

                    x.SetInput(batchx);
                    y.SetInput(batchy);

                    Index zero = new Index(model.OuterShape);
                    zero.SetZero();
                    model.PreCheck();
                    Tensor res = model.GetTerm(zero).GetResult();

                    for (int i = 0; i < batchsize; i++)
                    {
                        int myans      = MaxId((float *)res.Array + i * 10);
                        int correctres = MaxId((float *)batchy.Array + i * 10);
                        val += (myans == correctres ? 1 : 0);
                    }
                }
                s.Stop();

                Console.WriteLine("Epoch " + epoch + " biti.");
                Console.WriteLine("Loss: " + l / trainl);
                Console.WriteLine("Validation: " + val / (mnistsize - trainl));
                Console.WriteLine("Time: " + s.ElapsedMilliseconds + "ms");
            }

            PrintPools();

            while (true)
            {
                try
                {
                    float[] data   = LoadCurrentImage();
                    Tensor  x_test = Tensor.LoadArrayToDisposedTensor(data, new Shape(1, 1, 784), DeviceConfig.Host_Float);

                    model.DeleteTerms();

                    x.SetInput(x_test);

                    Index zero = new Index(model.OuterShape);
                    zero.SetZero();
                    model.PreCheck();
                    Tensor res = model.GetTerm(zero).GetResult();

                    Console.WriteLine("Result: " + res);
                    Console.WriteLine("Digit Prediction: " + MaxId((float *)res.Array));
                    Console.WriteLine("-----------");
                }
                catch (Exception)
                {
                    // Swallow errors from loading or predicting the current image and retry on the next poll.
                }
                Thread.Sleep(500);
            }
        }
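The MaxId helper (like LoadMNISTDataSet and LoadCurrentImage) is not shown in this example. As a hedged sketch only, an argmax routine consistent with how MaxId is called above (a raw pointer to 10 class scores, returning the index of the largest one) could look like the following; the library's actual implementation may differ.

        // Hypothetical argmax over 10 class scores, matching the call sites above.
        // This is an illustration under the stated assumptions, not the library's MaxId.
        static unsafe int MaxId(float *scores)
        {
            int best = 0;
            for (int i = 1; i < 10; i++)
            {
                if (scores[i] > scores[best])
                {
                    best = i;
                }
            }
            return best;
        }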