Beispiel #1
0
        /// <summary>
        /// Trains the model described by <paramref name="job"/> on tensors fetched from IPFS,
        /// then writes the trained model configuration back to IPFS.
        /// </summary>
        /// <param name="job">Job carrying the IPFS hashes of input/target data, the model layout, and the training config.</param>
        /// <returns>The IPFS hash of the written result job.</returns>
        public string TrainModel(IpfsJob job)
        {
            // Fetch the serialized tensors (flat "data" array + "shape" array) from IPFS.
            var tmpInput  = Ipfs.Get <JToken>(job.input);
            var tmpTarget = Ipfs.Get <JToken>(job.target);

            var seq = CreateSequential(job.Model);

            var inputData   = tmpInput.SelectToken("data").ToObject <float[]>();
            var inputShape  = tmpInput.SelectToken("shape").ToObject <int[]>();
            var inputTensor = controller.floatTensorFactory.Create(_data: inputData, _shape: inputShape, _autograd: true);

            var targetData   = tmpTarget.SelectToken("data").ToObject <float[]>();
            var targetShape  = tmpTarget.SelectToken("shape").ToObject <int[]>();
            var targetTensor = controller.floatTensorFactory.Create(_data: targetData, _shape: targetShape, _autograd: true);

            // NOTE: a 4x1 all-ones "grad" tensor was created here but never used; removed.

            // Select the loss criterion; unrecognized names fall back to MSE.
            Loss loss;

            switch (job.config.criterion)
            {
            case "mseloss":
                loss = new MSELoss(this.controller);
                break;

            case "categorical_crossentropy":
                loss = new CategoricalCrossEntropyLoss(this.controller);
                break;

            case "cross_entropy_loss":
                loss = new CrossEntropyLoss(this.controller, 1);     // TODO -- real value
                break;

            case "nll_loss":
                loss = new NLLLoss(this.controller);
                break;

            default:
                loss = new MSELoss(this.controller);
                break;
            }

            var optimizer = new SGD(this.controller, seq.getParameters(), job.config.lr, 0, 0);

            // Standard training loop: forward, loss, backward, optimizer step.
            for (var i = 0; i < job.config.iters; ++i)
            {
                var pred = seq.Forward(inputTensor);
                var l    = loss.Forward(pred, targetTensor);
                l.Backward();

                // TODO -- better batch size
                optimizer.Step(100, i);
            }

            // Persist the trained model config (same input/target hashes) back to IPFS.
            var resultJob = new Ipfs();
            var response  = resultJob.Write(new IpfsJob(job.input, job.target, seq.GetConfig(), job.config));

            return(response.Hash);
        }
Beispiel #2
0
        public void MSELossBackward()
        {
            // arrange: 1x1x3x3 prediction against a one-hot target
            var prediction = new Tensor(new double[, , , ] {
                { { { 1, 2, 3 }, { 4, 5, 6 }, { 7, 8, 9 } } }
            });
            var target = new Tensor(new double[, , , ] {
                { { { 1, 0, 0 }, { 0, 1, 0 }, { 0, 0, 1 } } }
            });
            var expectedGradient = new double[, , , ] {
                { { { 0, 4, 6 }, { 8, 8, 12 }, { 14, 16, 16 } } }
            };
            var criterion = new MSELoss();

            criterion.ForwardPass(prediction, target);

            // act
            var gradient = criterion.BackwardPass();

            // assert: compare every cell of the 3x3 spatial grid
            for (int cell = 0; cell < 9; cell++)
            {
                int row = cell / 3;
                int col = cell % 3;
                Assert.Equal(expectedGradient[0, 0, row, col], gradient[0, 0, row, col]);
            }
        }
Beispiel #3
0
        /// <summary>
        /// Manual smoke test for a 2-D convolution layer: loads input, target and pretrained
        /// weights from JSON files, runs Conv2D -> Sigmoid -> MSELoss forward, then
        /// backpropagates and prints the resulting gradients.
        /// NOTE(review): depends on hard-coded D:\ paths — only runs on the author's machine.
        /// </summary>
        static void  test2D()
        {
            float[][][,] x = JsonConvert.DeserializeObject <float[][][, ]>(getstr("D:\\x.json"));
            float[][][,] y = JsonConvert.DeserializeObject <float[][][, ]>(getstr("D:\\y.json"));
            float[][][,] w = JsonConvert.DeserializeObject <float[][][, ]>(getstr("D:\\w1.json"));
            float[] wb = JsonConvert.DeserializeObject <float[]>(getstr("D:\\w2.json"));

            Conv2DLayer cl = new Conv2DLayer(1, 1, 3, 1, 2, false);

            MSELoss mloss = new MSELoss();

            // Copy the deserialized kernel weights into the layer, channel by channel.
            cl.weights = new float[w.GetLength(0)][][, ];
            for (int a = 0; a < w.GetLength(0); a++)
            {
                cl.weights[a] = new float[w[0].GetLength(0)][, ];
                for (int b = 0; b < w[a].GetLength(0); b++)
                {
                    cl.weights[a][b] = new float[0, 0];
                    cl.weights[a][b] = w[a][b];
                }
            }
            cl.basicData = wb;
            // Forward pass
            dynamic temp = cl.Forward(x);

            SigmodLayer sl = new SigmodLayer();

            temp = sl.Forward(temp);

            //TanhLayer tl = new TanhLayer();
            //temp = tl.forward(temp);

            //MulLayer ml = new MulLayer();
            //temp = ml.forward(temp, y);

            float loss = mloss.Forward(temp, y);


            // Backward pass
            dynamic grad = mloss.Backward();// compute the error gradient at the loss

            grad = sl.Backward(grad);
            //grad = ml.backward(grad);



            //grad = tl.backward(grad);

            dynamic grad3 = cl.backward(grad);// the convolution backward runs last, after all other layers

            prirt(grad3.grid);
            prirt(grad3.basic);
        }
Beispiel #4
0
        public void MSELossForward()
        {
            // arrange: 1x1x3x3 prediction against a one-hot target
            var prediction = new Tensor(new double[, , , ] {
                { { { 1, 2, 3 }, { 4, 5, 6 }, { 7, 8, 9 } } }
            });
            var target = new Tensor(new double[, , , ] {
                { { { 1, 0, 0 }, { 0, 1, 0 }, { 0, 0, 1 } } }
            });
            const double expectedLoss = 258D;
            var criterion = new MSELoss();

            // act
            var actualLoss = criterion.ForwardPass(prediction, target);

            // assert
            Assert.Equal(expectedLoss, actualLoss);
        }
Beispiel #5
0
        /// <summary>
        /// Entry point: trains a tic-tac-toe net on the train split, round-trips the learned
        /// weights through a string dump, and evaluates the restored net on the test split.
        /// </summary>
        static void Main()
        {
            // All dataset files are expected in the current working directory.
            // NOTE: the original also built an unused "weights.txt" path; removed.
            string rootFolderPath = Environment.CurrentDirectory;

            var trainDataset = new Dataset(
                pathToData: $"{rootFolderPath}/trainData.txt",
                pathToLabels: $"{rootFolderPath}/trainLabels.txt",
                sideSize: 4
                );

            var testDataset = new Dataset(
                pathToData: $"{rootFolderPath}/testData.txt",
                pathToLabels: $"{rootFolderPath}/testLabels.txt",
                sideSize: 4
                );

            var net  = CreateTTTNet();
            var loss = new MSELoss();

            Train(
                net: net,
                lossFn: loss,
                dataset: trainDataset,
                validationRatio: 0.2f
                );

            // Serialize the weights and rebuild the net from the dump so the test below
            // also verifies the save/restore round trip.
            var weightsString = net.DumpStateToString(precision: Config.WEIGHTS_PRECISION);

            net = CreateTTTNet(weightsString);

            Test(
                net: net,
                lossFn: loss,
                dataset: testDataset
                );
        }
Beispiel #6
0
        /// <summary>
        /// Trains a ConvLSTM on radar frames from the "res" folder using a sliding window:
        /// each step feeds 10 consecutive frames and targets the 10 frames that follow,
        /// printing the MSE loss and the wall-clock time per iteration.
        /// </summary>
        static void Main(string[] args)
        {
            string[] files = System.IO.Directory.GetFiles("res");
            // Sort by name so consecutive indices are consecutive time steps.
            files = files.OrderBy(p => p).ToArray();
            float[][][,] datax = new float[1][][, ];
            float[][][,] datah = new float[1][][, ];
            float[][][,] datac = new float[1][][, ];
            float[][][,] datay = new float[1][][, ];
            datax[0]           = new float[10][, ];
            datah[0]           = new float[10][, ];
            datac[0]           = new float[10][, ];
            datay[0]           = new float[10][, ];
            MSELoss  mloss    = new MSELoss();
            ConvLSTM convLSTM = new ConvLSTM(10, 10, 5);

            // Slide the 10-frame window over the file list; stop so r+t+10 stays in range.
            // NOTE(review): with t up to 9, files[r + t + 10] needs r < files.Length - 19;
            // the r < files.Length - 10 bound looks off by 9 — confirm against the data size.
            for (int r = 0; r < files.Length - 10; r++)
            {
                for (int t = 0; t < 10; t++)
                {
                    string file = files[r + t];
                    float[,] anno1 = DenseCRF.util.readRADARMatrix(file);
                    datax[0][t]    = anno1;
                    // Hidden and cell state start as zero matrices of the frame's size.
                    datah[0][t]    = new float[anno1.GetLength(0), anno1.GetLength(1)];
                    datac[0][t]    = new float[anno1.GetLength(0), anno1.GetLength(1)];
                    file           = files[r + t + 10];
                    datay[0][t]    = DenseCRF.util.readRADARMatrix(file);
                }

                var star = DateTime.Now;
                var(h_next, c_next) = convLSTM.Forward(datax, datah, datac);
                var loss = mloss.Forward(h_next, datay);
                Console.WriteLine("误差:" + loss);
                var grid  = mloss.Backward();
                var grid2 = convLSTM.backward(grid);
                convLSTM.update();
                var end = DateTime.Now;
                // Wall-clock milliseconds for one forward/backward/update cycle.
                Console.WriteLine((end - star).TotalMilliseconds);
            }
        }
Beispiel #7
0
        public void TestModelCanLearn()
        {
            // Four 3-feature samples; each label equals the sample's third feature.
            float[] features     = { 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1 };
            int[]   featureShape = { 4, 3 };
            var     input        = new Syft.Tensor.FloatTensor(ctrl, _data: features, _shape: featureShape, _autograd: true);

            float[] labels     = { 0, 0, 1, 1 };
            int[]   labelShape = { 4, 1 };
            var     target     = new Syft.Tensor.FloatTensor(ctrl, _data: labels, _shape: labelShape, _autograd: true);

            // Small 3-4-1 MLP with sigmoid activations.
            var model = new Syft.Layer.Model(
                new Linear(ctrl, 3, 4),
                new Sigmoid(),
                new Linear(ctrl, 4, 1),
                new Sigmoid()
                );

            float finalLoss = 1;

            // Ten steps of plain gradient descent: w -= grad^T applied in place.
            for (var step = 0; step < 10; ++step)
            {
                var prediction = model.Predict(input);
                var loss       = MSELoss.Value(prediction, target);
                loss.Backward();

                foreach (var layer in model.Layers)
                {
                    var weight = layer.GetWeights();
                    weight?.Sub(weight.Grad.Transpose(), true);
                }

                finalLoss = loss.Data.Sum();
            }

            // The loss after training must not exceed the known-good threshold.
            Assert.True(Math.Round(finalLoss, 5) <= 0.20936);
        }
Beispiel #8
0
        static void Main(string[] args)
        {
            // 2-100-1 MLP trained on the XOR truth table.
            var xornet = new Sequential(
                new Linear(2, 100),
                new ReLU(),
                new Linear(100, 1));

            double[,] inputs  = { { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 } };
            double[,] targets = { { 0 }, { 1 }, { 1 }, { 0 } };

            Tensor input = new Tensor(np.array(inputs));
            Tensor label = new Tensor(np.array(targets));

            const int epochs = 1000;
            var       optim  = new SGD(xornet.parameters(), 0.05);
            var       mse    = new MSELoss();

            for (int epoch = 1; epoch <= epochs; epoch++)
            {
                Tensor output = xornet.forward(input);
                Tensor loss   = mse.forward(output, label);
                optim.zero_grad();
                loss.backward();
                optim.step();
                Console.WriteLine("[+] Epoch: " + epoch + " Loss: " + loss);
            }

            // Run the trained net once more on the four XOR rows and print the predictions.
            Tensor probe   = new Tensor(new NDArray(inputs));
            Tensor outputs = xornet.forward(probe);

            Console.WriteLine("Result: " + outputs.data.flatten().ToString());
            Console.ReadLine();
        }
Beispiel #9
0
        /// <summary>
        /// BP (back-propagation) network test: trains a 13-5-1 fully connected net with a
        /// sigmoid hidden layer for 5000 full-batch epochs, then prints the prediction for
        /// one training sample next to its expected label.
        /// NOTE(review): depends on hard-coded D:\ JSON paths — only runs on the author's machine.
        /// </summary>
        static void BP()
        {
            float[][] x  = JsonConvert.DeserializeObject <float[][]>(util.getstr("D:\\bpx.json")); // training data
            float[][] y  = JsonConvert.DeserializeObject <float[][]>(util.getstr("D:\\bpy.json")); // training labels
            float[][] w1 = JsonConvert.DeserializeObject <float[][]>(util.getstr("D:\\bpw.json"));


            ConvLayer cl1 = new ConvLayer(13, 5, true);

            cl1.weights = w1;
            SigmodLayer sl  = new SigmodLayer();
            float       lr  = 0.5f;   // learning rate
            ConvLayer   cl2 = new ConvLayer(5, 1, true);
            //SigmodLayer s2 = new SigmodLayer();
            int i = 0, a = 0;

            // 5000 full-batch epochs (the commented-out inner loop was a per-sample variant).
            while (a < 5000)
            {
                //i = 0;
                //while (i < 100)
                //{
                //    float[][] xx2 = new float[1][];
                //    xx2[0] = new float[x[0].GetLength(0)];

                //    for (var f = 0; f < x[0].GetLength(0); f++)
                //    {

                //        xx2[0][f] = x[i][f];
                //    }
                dynamic ff = cl1.Forward(x);
                ff = sl.Forward(ff);
                ff = cl2.Forward(ff);
                // dynamic ff22 = s2.forward(ff);
                // compute the loss
                MSELoss mloss = new MSELoss();
                //float[][] yy2= new float[1][];
                //yy2[0] = y[i];
                var loss = mloss.Forward(ff, y);

                Console.WriteLine("误差:" + loss);

                dynamic grid = mloss.Backward();

                // backpropagate to get layer-2 weight gradients
                //  dynamic grid2 =s2.backward(grid);
                dynamic w22 = cl2.backweight(grid);

                // backpropagate to get layer-1 weight gradients
                dynamic grid1 = cl2.backward(grid);
                grid1 = sl.Backward(grid1);
                dynamic w11 = cl1.backweight(grid1);


                // SGD update: w -= lr * grad, for both weights and biases of each layer.
                cl2.weights   = Matrix.MatrixSub(cl2.weights, Matrix.multiply(w22.grid, lr));
                cl2.basicData = Matrix.MatrixSub(cl2.basicData, Matrix.multiply(w22.basic, lr));

                cl1.weights   = Matrix.MatrixSub(cl1.weights, Matrix.multiply(w11.grid, lr));
                cl1.basicData = Matrix.MatrixSub(cl1.basicData, Matrix.multiply(w11.basic, lr));
                i++;
                // }
                a++;
            }

            // Test the network on training sample #3 and print prediction vs. label.
            float[][] xx = new float[1][];
            xx[0] = new float[x[0].GetLength(0)];
            var aa = 3;

            for (var f = 0; f < x[0].GetLength(0); f++)
            {
                xx[0][f] = x[aa][f];
            }
            dynamic ff2 = cl1.Forward(xx);

            ff2 = sl.Forward(ff2);
            ff2 = cl2.Forward(ff2);

            util.prirt(ff2);
            util.prirt(y[aa]);
        }
Beispiel #10
0
        /// <summary>
        /// Single-layer ("单层") LSTM cell test: trains lstm1 online over a sliding window of
        /// the series returned by getdata() (predicting the next value from 10-step windows),
        /// then runs a forecast-only pass on the remaining data and prints prediction vs. target.
        /// NOTE(review): lstm2 is constructed but never used — looks like leftover code.
        /// </summary>
        static void 单层()
        {
            // float[][][] prev_state = new float[2][][];
            LSTMCELL lstm1 = new LSTMCELL(7, 1);
            LSTMCELL lstm2 = new LSTMCELL(10, 1);
            //var x_numpy = JsonConvert.DeserializeObject<float[][]>(util.getstr("D:\\x_numpy.json"));
            //var h_numpy = JsonConvert.DeserializeObject<float[][]>(util.getstr("D:\\h_numpy.json"));
            //var c_numpy = JsonConvert.DeserializeObject<float[][]>(util.getstr("D:\\c_numpy.json"));
            //var dh_numpy = JsonConvert.DeserializeObject<float[][]>(util.getstr("D:\\dh_numpy.json"));
            var x_numpy  = new float[1][];
            var h_numpy  = new float[1][];
            var c_numpy  = new float[1][];
            var h_numpy2 = new float[1][];
            var c_numpy2 = new float[1][];

            var dh_numpy = new float[1][];
            var dataall  = getdata();

            // Training phase: windows starting at 0..29999.
            for (int i = 0; i < 30000; i++)
            {
                // Reset hidden state to zeros at the start of each window.
                for (int j = 0; j < 1; j++)
                {
                    h_numpy[j]  = new float[1];
                    h_numpy2[j] = new float[1];
                }

                // Reset cell state to zeros as well.
                for (int j = 0; j < 1; j++)
                {
                    c_numpy[j]  = new float[1];
                    c_numpy2[j] = new float[1];
                }
                var     loss  = 0.0f;
                MSELoss mloss = new MSELoss();
                // Unroll 10 time steps; h/c carry over between steps within the window.
                for (int j = 0; j < 10; j++)
                {
                    x_numpy[0] = dataall[j + i];

                    var dhgird = lstm1.Forward(x_numpy, h_numpy, c_numpy);

                    h_numpy     = dhgird.Item1;
                    c_numpy     = dhgird.Item2;
                    // Target is the next value in the series.
                    dh_numpy[0] = new float[] { dataall[i + j + 1][0] };
                    loss       += mloss.Forward(h_numpy, dh_numpy);

                    // Update weights after every single step (online learning).
                    var gird = mloss.Backward();
                    lstm1.backward(gird);
                    lstm1.update();
                }

                Console.WriteLine("误差:" + loss);
            }
            // Evaluation phase: forward only, no weight updates.
            for (int i = 30000; i < 40000; i++)
            {
                for (int j = 0; j < 1; j++)
                {
                    h_numpy[j] = new float[1];
                }

                for (int j = 0; j < 1; j++)
                {
                    c_numpy[j] = new float[1];
                }
                var     loss  = 0.0f;
                MSELoss mloss = new MSELoss();

                for (int j = 0; j < 10; j++)
                {
                    x_numpy[0]  = dataall[j + i];
                    dh_numpy[0] = new float[] { dataall[j + i + 1][0] };
                    var dhgird = lstm1.Forward(x_numpy, h_numpy, c_numpy);
                    h_numpy = dhgird.Item1;
                    c_numpy = dhgird.Item2;
                    //fff += c_numpy[0][0];


                    //var gird = mloss.Backward();
                    //lstm.backward(gird);
                }
                // Only the loss at the final step of the window is reported.
                loss += mloss.Forward(h_numpy, dh_numpy);
                Console.WriteLine("误差:" + loss + ",预测:" + (h_numpy[0][0]) * 1000 + ",期望:" + (dh_numpy[0][0]) * 1000);
            }
        }
Beispiel #11
0
        /// <summary>
        /// Single-layer ("单层") GRU test: trains a GRU with 15 hidden units online over a
        /// sliding window of the series from getdata() (predicting the next value from
        /// 10-step windows), then forecasts on the remaining data and prints prediction vs. target.
        /// </summary>
        static void 单层()
        {
            // float[][][] prev_state = new float[2][][];
            GRU gru = new GRU(7, 15, 1);

            var x_numpy = new float[1][];
            var h_numpy = new float[1][];

            var h_numpy2 = new float[1][];

            var dh_numpy = new float[1][];
            var dataall  = getdata();

            // Training phase: windows starting at 0..34999.
            for (int i = 0; i < 35000; i++)
            {
                // Reset the hidden state (15 units) at the start of each window.
                for (int j = 0; j < 1; j++)
                {
                    h_numpy[j]  = new float[15];
                    h_numpy2[j] = new float[15];
                }


                var     loss  = 0.0f;
                MSELoss mloss = new MSELoss();
                // Unroll 10 time steps; hidden state carries over within the window.
                for (int j = 0; j < 10; j++)
                {
                    x_numpy[0] = dataall[j + i];

                    var dhgird = gru.Forward(x_numpy, h_numpy);

                    h_numpy = dhgird.Item2;
                    // c_numpy = dhgird.Item2;
                    // Target is the next value; loss is computed on the output (Item1).
                    dh_numpy[0] = new float[] { dataall[i + j + 1][0] };
                    loss       += mloss.Forward(dhgird.Item1, dh_numpy);

                    // Update weights after every single step (online learning).
                    var gird = mloss.Backward();
                    gru.backward(gird);
                    gru.update();
                }

                Console.WriteLine("误差:" + loss);
            }
            // Evaluation phase: forward only, no weight updates.
            for (int i = 35000; i < 40000; i++)
            {
                for (int j = 0; j < 1; j++)
                {
                    h_numpy[j] = new float[15];
                }


                var     loss  = 0.0f;
                MSELoss mloss = new MSELoss();
                dynamic DY    = null;
                for (int j = 0; j < 10; j++)
                {
                    x_numpy[0]  = dataall[j + i];
                    dh_numpy[0] = new float[] { dataall[j + i + 1][0] };
                    var dhgird = gru.Forward(x_numpy, h_numpy);
                    h_numpy = dhgird.Item2;
                    DY      = dhgird.Item1;
                }
                // Only the output at the final step of the window is scored and reported.
                loss += mloss.Forward(DY, dh_numpy);
                Console.WriteLine("误差:" + loss + ",预测:" + (DY[0][0]) * 1000 + ",期望:" + (dh_numpy[0][0]) * 1000);
            }
        }
Beispiel #12
0
        /// <summary>
        /// Dispatches a JSON-encoded command to the matching handler (float/int tensor
        /// factories, model factory, or controller queries) and returns the handler's
        /// response — typically a newly created object's id — or an error string.
        /// </summary>
        /// <param name="json_message">JSON serialization of a <see cref="Command"/>.</param>
        /// <returns>Handler response on success; a "Unity Error: ..." string otherwise.</returns>
        public string processMessage(string json_message)
        {
            //Debug.LogFormat("<color=green>SyftController.processMessage {0}</color>", json_message);

            Command msgObj = JsonUtility.FromJson <Command> (json_message);

            try
            {
                switch (msgObj.objectType)
                {
                case "FloatTensor":
                {
                    // objectIndex 0 + "create" means "make a new tensor"; otherwise the
                    // message targets an existing tensor by index.
                    if (msgObj.objectIndex == 0 && msgObj.functionCall == "create")
                    {
                        FloatTensor tensor = floatTensorFactory.Create(_shape: msgObj.shape, _data: msgObj.data, _shader: this.Shader);
                        return(tensor.Id.ToString());
                    }
                    else
                    {
                        FloatTensor tensor = floatTensorFactory.Get(msgObj.objectIndex);
                        // Process message's function
                        return(tensor.ProcessMessage(msgObj, this));
                    }
                }

                case "IntTensor":
                {
                    if (msgObj.objectIndex == 0 && msgObj.functionCall == "create")
                    {
                        // The wire format carries floats; narrow each element to int.
                        int[] data = new int[msgObj.data.Length];
                        for (int i = 0; i < msgObj.data.Length; i++)
                        {
                            data[i] = (int)msgObj.data[i];
                        }
                        IntTensor tensor = intTensorFactory.Create(_shape: msgObj.shape, _data: data, _shader: this.Shader);
                        return(tensor.Id.ToString());
                    }
                    else
                    {
                        IntTensor tensor = intTensorFactory.Get(msgObj.objectIndex);
                        // Process message's function
                        return(tensor.ProcessMessage(msgObj, this));
                    }
                }

                case "model":
                {
                    if (msgObj.functionCall == "create")
                    {
                        // First param selects the model/layer type to instantiate.
                        string model_type = msgObj.tensorIndexParams[0];

                        if (model_type == "linear")
                        {
                            Debug.LogFormat("<color=magenta>createModel:</color> {0} : {1} {2}", model_type,
                                            msgObj.tensorIndexParams[1], msgObj.tensorIndexParams[2]);
                            Linear model = new Linear(this, int.Parse(msgObj.tensorIndexParams[1]), int.Parse(msgObj.tensorIndexParams[2]));
                            return(model.Id.ToString());
                        }
                        else if (model_type == "sigmoid")
                        {
                            Debug.LogFormat("<color=magenta>createModel:</color> {0}", model_type);
                            Sigmoid model = new Sigmoid(this);
                            return(model.Id.ToString());
                        }
                        else if (model_type == "sequential")
                        {
                            Debug.LogFormat("<color=magenta>createModel:</color> {0}", model_type);
                            Sequential model = new Sequential(this);
                            return(model.Id.ToString());
                        }
                        else if (model_type == "policy")
                        {
                            Debug.LogFormat("<color=magenta>createModel:</color> {0}", model_type);
                            Policy model = new Policy(this, (Layer)getModel(int.Parse(msgObj.tensorIndexParams[1])));
                            return(model.Id.ToString());
                        }
                        else if (model_type == "tanh")
                        {
                            Debug.LogFormat("<color=magenta>createModel:</color> {0}", model_type);
                            Tanh model = new Tanh(this);
                            return(model.Id.ToString());
                        }
                        else if (model_type == "crossentropyloss")
                        {
                            Debug.LogFormat("<color=magenta>createModel:</color> {0}", model_type);
                            CrossEntropyLoss model = new CrossEntropyLoss(this);
                            return(model.Id.ToString());
                        }
                        else if (model_type == "mseloss")
                        {
                            Debug.LogFormat("<color=magenta>createModel:</color> {0}", model_type);
                            MSELoss model = new MSELoss(this);
                            return(model.Id.ToString());
                        }
                    }
                    else
                    {
                        // Non-create calls are forwarded to the existing model instance.
                        Model model = this.getModel(msgObj.objectIndex);
                        return(model.ProcessMessage(msgObj, this));
                    }
                    // Reached when "create" was requested with an unknown model type.
                    return("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
                }

                case "controller":
                {
                    if (msgObj.functionCall == "num_tensors")
                    {
                        return(floatTensorFactory.Count() + "");
                    }
                    else if (msgObj.functionCall == "num_models")
                    {
                        return(models.Count + "");
                    }
                    else if (msgObj.functionCall == "new_tensors_allowed")
                    {
                        Debug.LogFormat("New Tensors Allowed:{0}", msgObj.tensorIndexParams[0]);
                        // The flag arrives as the Python-style strings "True"/"False".
                        if (msgObj.tensorIndexParams[0] == "True")
                        {
                            allow_new_tensors = true;
                        }
                        else if (msgObj.tensorIndexParams[0] == "False")
                        {
                            allow_new_tensors = false;
                        }
                        else
                        {
                            throw new Exception("Invalid parameter for new_tensors_allowed. Did you mean true or false?");
                        }

                        return(allow_new_tensors + "");
                    }
                    return("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
                }

                default:
                    break;
                }
            }
            catch (Exception e)
            {
                // Any handler failure is logged and reported back as an error string.
                Debug.LogFormat("<color=red>{0}</color>", e.ToString());
                return("Unity Error: " + e.ToString());
            }

            // If not executing createTensor or tensor function, return default error.
            return("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
        }