Ejemplo n.º 1
0
        public void LSTM_Test_Params_Count()
        {
            // Arrange: feature variable (dim 2) and label variable (dim 3).
            Variable x = Variable.InputVariable(new int[] { 2 }, DataType.Float, "input");
            Variable y = Variable.InputVariable(new int[] { 3 }, DataType.Float, "output");

            // Act: build an LSTM recurrence (hidden=3, cell=3), peephole and
            // self-stabilization both disabled.
            var lstm1 = RNN.RecurrenceLSTM(x, 3, 3, DataType.Float, device, false, Activation.TanH, false, false, 1);

            // Partition the function's inputs by their Uid prefix.
            var parameters = lstm1.Inputs.Where(v => v.Uid.StartsWith("Parameter")).ToList();
            var constants  = lstm1.Inputs.Where(v => v.Uid.StartsWith("Constant")).ToList();
            var inputs     = lstm1.Inputs.Where(v => v.Uid.StartsWith("Input")).ToList();

            // Assert: bias parameter count (expected 12 values).
            var biasParams = parameters.Where(p => p.Name.Contains("_b")).ToList();
            var biasTotal  = biasParams.Sum(v => v.Shape.TotalSize);
            Assert.Equal(12, biasTotal);

            // Input-to-gate weight count (expected 24 values).
            var weightParams = parameters.Where(p => p.Name.Contains("_w")).ToList();
            var weightTotal  = weightParams.Sum(v => v.Shape.TotalSize);
            Assert.Equal(24, weightTotal);

            // Recurrent (update) weight count (expected 36 values).
            var updateParams = parameters.Where(p => p.Name.Contains("_u")).ToList();
            var updateTotal  = updateParams.Sum(v => v.Shape.TotalSize);
            Assert.Equal(36, updateTotal);

            // Grand total of learnable parameters.
            var grandTotal = biasTotal + weightTotal + updateTotal;
            Assert.Equal(72, grandTotal);
        }
Ejemplo n.º 2
0
    // Use this for initialization
    // Use this for initialization
    void Start()
    {
        num_neurons = 10;

        // Hand-tuned network parameters; 120 values make up one chromosome.
        double[] rawParams = new double[120] {
            7.513874, -13.31366, -5.891386, 3.89313, -13.35374, -5.470473, -28.48326, -0.8950226, 1.676227, -7.829721, 3.001554, 0.9742008, 6.85763, 12.07218, -1.542749, 6.42987, -6.777609, -12.28891, -11.6297, -3.275043, -0.3349386, 13.02803, -3.171752, 2.909704, 7.935295, 2.154472, 5.825492, 6.763169, 6.5986, 0.846593, 6.619426, 0.1207947, 7.378351, 5.546535, -0.8831579, -3.48157, 2.024852, -5.732147, 16.51345, -11.30504, -4.979899, 0.1936812, -7.330712, -2.363033, -5.555974, -11.48479, 6.027504, -0.5046204, -5.604547, 6.99612, 8.643599, -0.5479781, 1.765478, 12.20771, -14.86998, -10.28358, 1.827907, -1.541205, 8.282341, 14.34195, 0.6747534, 8.42286, -10.29843, -0.5448059, 6.860302, -7.558879, -3.571216, -3.13385, 12.60011, -0.07997914, -4.452103, 1.369113, 7.464754, -8.164039, -4.246214, -7.685563, -1.948951, 15.26882, -5.696559, -1.464601, 1.735656, 13.39631, 12.12768, -10.33641, 1.26995, 6.143419, -3.200521, -6.426282, -4.033798, -9.858385, 3.240334, 5.088798, -8.307716, 4.577082, 9.247719, -4.710557, 11.59537, 1.740587, 1.173246, 5.654369, -0.01115066, 3.742572, 0.01587177, 5.836065, -1.146728, -0.4543813, -0.09580191, -3.896233, -1.911579, 4.912868, -0.4526708, 3.798782, 4.286755, 2.614983, 3.773993, -1.182102, 3.293242, 3.547439, 4.012696, 0.9868906,
        };

        // The chromosome stores single-precision values, so narrow each one.
        float[] chromParams = new float[120];
        for (int i = 0; i < 120; i++)
        {
            chromParams[i] = (float)rawParams[i];
        }

        // Simulation constants.
        run          = 1;
        spring_const = 1000;
        damper       = 100;

        // Cache the joint components from the four leg segments.
        hip_right  = upper_right.GetComponent <ConfigurableJoint> ();
        knee_right = lower_right.GetComponent <HingeJoint> ();
        hip_left   = upper_left.GetComponent <ConfigurableJoint> ();
        knee_left  = lower_left.GetComponent <HingeJoint> ();

        // Build the controller network from the fixed parameter set above
        // (rather than from the genetic algorithm's current chromosome).
        gen_alg = new GenAlg();
        net     = new RNN(new Chromosome(chromParams));

        Debug.Log("weights: " + net.weights.ToString());
        counter = 0;
    }
Ejemplo n.º 3
0
        public void lstm_test01()
        {
            //define values, and variables
            Variable x       = Variable.InputVariable(new int[] { 4 }, DataType.Float, "input");
            var      xValues = Value.CreateBatchOfSequences <float>(new int[] { 4 }, mData, device);

            //LSTM implementation under test
            var lstm00 = RNN.RecurrenceLSTM(x, 3, 3, DataType.Float, device, false, Activation.TanH, true, true, 1);

            //reference implementation 01 (previous ANNdotNET LSTM)
            LSTMReccurentNN lstmNN = new LSTMReccurentNN(1, 1, device);
            var lstmCell = lstmNN.CreateLSTM(x, "output1");
            var lstm01   = CNTKLib.SequenceLast(lstmCell.h);

            //reference implementation 02 (CNTK examples LSTM)
            var lstm02 = LSTMSequenceClassifier.LSTMNet(x, 1, device, "output1");

            //BUGFIX: each parameter list must come from its own network;
            //the original queried lstm00.Inputs three times, making every
            //comparison below compare lstm00 against itself.
            var wParams00 = lstm00.Inputs.Where(p => p.Uid.Contains("Parameter")).ToList();
            var wParams01 = lstm01.Inputs.Where(p => p.Uid.Contains("Parameter")).ToList();
            var wParams02 = lstm02.Inputs.Where(p => p.Uid.Contains("Parameter")).ToList();

            //parameter count
            Assert.Equal(wParams00.Count, wParams01.Count);
            Assert.Equal(wParams00.Count, wParams02.Count);

            //structure of parameters test: same number of biases, weights,
            //recurrent weights, peephole and stabilizer parameters
            Assert.Equal(wParams00.Where(p => p.Name.Contains("_b")).Count(), wParams01.Where(p => p.Name.Contains("_b")).Count());
            Assert.Equal(wParams00.Where(p => p.Name.Contains("_w")).Count(), wParams01.Where(p => p.Name.Contains("_w")).Count());
            Assert.Equal(wParams00.Where(p => p.Name.Contains("_u")).Count(), wParams01.Where(p => p.Name.Contains("_u")).Count());
            Assert.Equal(wParams00.Where(p => p.Name.Contains("peep")).Count(), wParams01.Where(p => p.Name.Contains("peep")).Count());
            Assert.Equal(wParams00.Where(p => p.Name.Contains("stabilize")).Count(), wParams01.Where(p => p.Name.Contains("stabilize")).Count());


            //check structure of the two reference implementations against each other
            //check for arguments
            Assert.True(lstm01.Arguments.Count == lstm02.Arguments.Count);
            for (int i = 0; i < lstm01.Arguments.Count; i++)
            {
                //BUGFIX: compare lstm01 against lstm02 (original compared lstm01 with itself)
                testVariable(lstm01.Arguments[i], lstm02.Arguments[i]);
            }

            //inputs
            Assert.True(lstm01.Inputs.Count == lstm02.Inputs.Count);
            for (int i = 0; i < lstm01.Inputs.Count; i++)
            {
                testVariable(lstm01.Inputs[i], lstm02.Inputs[i]);
            }

            //outputs
            Assert.True(lstm01.Outputs.Count == lstm02.Outputs.Count);
            for (int i = 0; i < lstm01.Outputs.Count; i++)
            {
                testVariable(lstm01.Outputs[i], lstm02.Outputs[i]);
            }
        }
Ejemplo n.º 4
0
        static RnnTrainResult Train(DataSet data, int layer1NodeCount, int layer2NodeCount, int epochMax)
        {
            //topology: two logistic-sigmoid hidden layers followed by one linear output node
            var inputCount = data.Input.RowCount;
            var layerSpecs = new List <LayerSpec>
            {
                new LayerSpec(layer1NodeCount, true, ActivationType.LogisticSigmoid),
                new LayerSpec(layer2NodeCount, true, ActivationType.LogisticSigmoid),
                new LayerSpec(1, false, ActivationType.Linear)
            };

            //random starting weights drawn uniformly from [-1, 1]
            var totalWeights = RNN.GetWeightCount(layerSpecs, inputCount);
            var startWeights = QuqeUtil.MakeRandomVector(totalWeights, -1, 1);

            //train with scaled conjugate gradient for at most epochMax epochs
            return RNN.TrainSCG(layerSpecs, startWeights, epochMax, data.Input, data.Output);
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Implementation of a custom NN model for the future-sales prediction task:
        /// embeddings over the categorical inputs, spliced with the sales-count input,
        /// followed by an LSTM recurrence, a dense layer, dropout, and a linear output.
        /// </summary>
        /// <param name="variables">Ordered input variables: [0]=year, [1]=month, [2]=shop,
        /// [3]=item, [4]=previous sales count, [5]=label.</param>
        /// <param name="device">Device on which the network parameters are created.</param>
        /// <returns>The composed CNTK model function (output layer, no activation).</returns>
        private static Function PredictFutureSalesModel(List <Variable> variables, DeviceDescriptor device)
        {
            //define features and label vars; order is fixed by the caller (see summary)
            Variable yearVar = variables[0];
            Variable montVar = variables[1];
            Variable shopVar = variables[2];
            Variable itemVar = variables[3];
            Variable cnt3Var = variables[4];
            Variable label   = variables[5];

            //create feed-forward network factory
            var ffNet = new FeedForwaredNN(device);

            //predefined parameters: LSTM hidden/cell dimensions and dropout rate
            var H_DIMS    = 11;
            var CELL_DIMS = 3;
            var DROPRATRE = 0.2f;
            var outDim    = label.Shape.Dimensions.Last();

            //embedding layer and dimensionality reduction
            //(year keeps dim-1 components, the others are halved; item is reduced twice)
            var yearEmb    = Embedding.Create(yearVar, yearVar.Shape.Dimensions[0] - 1, DataType.Float, device, 1, yearVar.Name + "_emb");
            var monthEmb   = Embedding.Create(montVar, montVar.Shape.Dimensions[0] / 2, DataType.Float, device, 1, montVar.Name + "_emb");
            var varshopEmb = Embedding.Create(shopVar, shopVar.Shape.Dimensions[0] / 2, DataType.Float, device, 1, shopVar.Name + "_emb");

            var itemEmb  = Embedding.Create(itemVar, itemVar.Shape.Dimensions[0] / 2, DataType.Float, device, 1, itemVar.Name + "_emb");
            var itemEmb2 = Embedding.Create(itemEmb, itemEmb.Output.Shape.Dimensions[0] / 4, DataType.Float, device, 1, itemEmb.Name + "_emb");

            //join all embedding layers with input variable of previous product sales
            var emb = CNTKLib.Splice(new VariableVector()
            {
                yearEmb, monthEmb, varshopEmb, itemEmb2, cnt3Var
            }, new Axis(0));

            //create recurrence for time series on top of joined layer
            //(peephole and self-stabilization enabled, sequence not returned)
            var lstmLayer = RNN.RecurrenceLSTM(emb, H_DIMS, CELL_DIMS, DataType.Float, device, false, Activation.TanH, true, true);

            //create dense on top of LSTM recurrence layers
            var denseLayer = ffNet.Dense(lstmLayer, 33, Activation.TanH);

            //create dropout layer on top of dense layer
            var dropoutLay = CNTKLib.Dropout(denseLayer, DROPRATRE);

            //create dense layer without activation function, sized to the label dimension
            var outLayer = ffNet.Dense(dropoutLay, outDim, Activation.None, label.Name);

            //
            return(outLayer);
        }
 public void CreateNewPopulation(uint nPopulation)
 {
     //seed the population with nPopulation freshly initialized individuals,
     //each wrapping a recurrent network (8 inputs, 3 outputs, hidden 16-8-4-2, tanh)
     for (int i = 0; i < nPopulation; i++)
     {
         var model = new RNN(8, 3, new uint[] { 16, 8, 4, 2 }, Activations.Tanh);
         Population.Add(new Individual(model));
     }
 }
Ejemplo n.º 7
0
        /// <summary>
        /// Create cntk model function by providing parameters. The method is able for create:
        ///     - feedforward  with one hidden layer and any number of neurons
        ///     - deep neural network with any number of hidden layers and any number of neurons. Each hidden number has the same number of neurons
        ///     - LSTM NN with any number of hidden layers of LSTM , and any number of LSTM Cells in each layer. Also at the top of the network you can define
        ///             one dense layer and one dropout layer.
        /// </summary>
        /// <param name="layers">Layer specifications, applied in order.</param>
        /// <param name="inputVars">Input variables; multiple inputs are spliced into a single input layer.</param>
        /// <param name="outpuVar">Output (label) variable; its name and last dimension shape the final layer.</param>
        /// <param name="device">Device on which network parameters are created.</param>
        /// <returns>The composed CNTK network function.</returns>
        public static Function CreateNetwrok(List <NNLayer> layers, List <Variable> inputVars, Variable outpuVar, DeviceDescriptor device)
        {
            DataType type       = DataType.Float;
            Variable inputLayer = null;

            if (inputVars.Count > 1)
            {
                var vv = new VariableVector();
                foreach (var v in inputVars)
                {
                    //check if variable is stored as Sparse then we should create one embedding layer before splice
                    //since mixing sparse and dense data is not supported
                    if (v.IsSparse)
                    {
                        var v1 = Embedding.Create(v, v.Shape.Dimensions.Last(), type, device, 1, v.Name + "_sp_emb");
                        vv.Add(v1);
                    }
                    else
                    {
                        vv.Add(v);
                    }
                }


                //join all inputs into a single input layer
                inputLayer = (Variable)CNTKLib.Splice(vv, new Axis(0));
            }
            else //define input layer
            {
                inputLayer = inputVars.First();
            }


            //Create network
            var net = inputLayer;
            var ff  = new FeedForwaredNN(device, type);

            //set last layer name to label name
            layers.Last().Name = outpuVar.Name;

            //get last LSTM layer: only the last one collapses the sequence
            var lastLSTM = layers.LastOrDefault(x => x.Type == LayerType.LSTM);

            //stack the layers in declaration order
            foreach (var layer in layers)
            {
                if (layer.Type == LayerType.Dense)
                {
                    net = ff.Dense(net, layer.HDimension, layer.Activation, layer.Name);
                }
                else if (layer.Type == LayerType.Drop)
                {
                    //layer.Value stores a percentage; CNTK expects a rate in [0,1]
                    net = CNTKLib.Dropout(net, layer.Value / 100.0f);
                }
                else if (layer.Type == LayerType.Embedding)
                {
                    net = Embedding.Create(net, layer.HDimension, type, device, 1, layer.Name);
                }
                else if (layer.Type == LayerType.LSTM)
                {
                    //every LSTM layer except the last returns the full sequence
                    var returnSequence = true;
                    if (layers.IndexOf(lastLSTM) == layers.IndexOf(layer))
                    {
                        returnSequence = false;
                    }
                    net = RNN.RecurrenceLSTM(net, layer.HDimension, layer.CDimension, type, device, returnSequence, layer.Activation,
                                             layer.Peephole, layer.SelfStabilization, 1);
                }
            }

            //check if last layer is compatible with the output
            if (net.Shape.Dimensions.Last() != outpuVar.Shape.Dimensions.Last())
            {
                //BUGFIX: the adapter layer's result was previously discarded, so the
                //returned network never matched the output dimension; assign it to net
                net = ff.CreateOutputLayer(net, outpuVar, Activation.None);
            }

            return(net);
        }
            public void CopyWeights(INNBase source)
            {
                //copy the hidden-layer weights from another RNN instance;
                //a non-RNN source fails fast with an InvalidCastException
                var other = (RNN)source;
                h_layers.CopyWeights(other.h_layers);
            }
Ejemplo n.º 9
0
        public void LSTM_Test_Params_Count_with_peep_selfstabilize()
        {
            // Arrange: feature variable (dim 2) and label variable (dim 3).
            Variable x = Variable.InputVariable(new int[] { 2 }, DataType.Float, "input");
            Variable y = Variable.InputVariable(new int[] { 3 }, DataType.Float, "output");

            #region lstm org implemented in cntk for reference
            // Reference LSTM from the CNTK examples, partitioned the same way as the
            // network under test; kept for side-by-side inspection while debugging.
            var lstmTest02 = LSTMSequenceClassifier.LSTMNet(x, 3, device, "output1");
            var ft2        = lstmTest02.Inputs.Where(v => v.Uid.StartsWith("Parameter")).ToList();
            var totalSize  = ft2.Sum(p => p.Shape.TotalSize);

            var bs2      = ft2.Where(p => p.Name.Contains("_b")).ToList();          //bias
            var totalBs2 = bs2.Sum(v => v.Shape.TotalSize);

            var ws2      = ft2.Where(p => p.Name.Contains("_w")).ToList();          //input weights
            var totalWs2 = ws2.Sum(v => v.Shape.TotalSize);

            var us2      = ft2.Where(p => p.Name.Contains("_u")).ToList();          //recurrent weights
            var totalUs2 = us2.Sum(v => v.Shape.TotalSize);

            var ph2      = ft2.Where(p => p.Name.Contains("_peep")).ToList();       //peephole
            var totalph2 = ph2.Sum(v => v.Shape.TotalSize);

            var st2      = ft2.Where(p => p.Name.Contains("_stabilize")).ToList();  //stabilizer
            var totalst2 = st2.Sum(v => v.Shape.TotalSize);
            #endregion

            // Act: LSTM recurrence under test (hidden=3, cell=3) with BOTH peephole
            // connections and self-stabilization enabled.
            var lstm1 = RNN.RecurrenceLSTM(x, 3, 3, DataType.Float, device, false, Activation.TanH, true, true, 1);

            var ft     = lstm1.Inputs.Where(v => v.Uid.StartsWith("Parameter")).ToList();
            var consts = lstm1.Inputs.Where(v => v.Uid.StartsWith("Constant")).ToList();
            var inp    = lstm1.Inputs.Where(v => v.Uid.StartsWith("Input")).ToList();

            // Assert: bias parameters (expected 12 values).
            var bs      = ft.Where(p => p.Name.Contains("_b")).ToList();
            var totalBs = bs.Sum(v => v.Shape.TotalSize);
            Assert.Equal(12, totalBs);

            // Input-to-gate weights (expected 24 values).
            var ws      = ft.Where(p => p.Name.Contains("_w")).ToList();
            var totalWs = ws.Sum(v => v.Shape.TotalSize);
            Assert.Equal(24, totalWs);

            // Recurrent (update) weights (expected 36 values).
            var us      = ft.Where(p => p.Name.Contains("_u")).ToList();
            var totalUs = us.Sum(v => v.Shape.TotalSize);
            Assert.Equal(36, totalUs);

            // Peephole parameters (expected 9 values).
            var ph      = ft.Where(p => p.Name.Contains("_peep")).ToList();
            var totalPh = ph.Sum(v => v.Shape.TotalSize);
            Assert.Equal(9, totalPh);

            // Self-stabilizer parameters (expected 6 values).
            var st      = ft.Where(p => p.Name.Contains("_stabilize")).ToList();
            var totalst = st.Sum(v => v.Shape.TotalSize);
            Assert.Equal(6, totalst);

            // Breakdown: 72 core params; +6 with stabilization; +9 with peephole;
            // 87 when both are enabled. The itemized sum must equal the grand total.
            var totalOnly          = totalBs + totalWs + totalUs;
            var totalWithSTabilize = totalOnly + totalst;
            var totalWithPeep      = totalOnly + totalPh;

            var totalP      = totalOnly + totalst + totalPh;
            var totalParams = ft.Sum(v => v.Shape.TotalSize);
            Assert.Equal(totalP, totalParams);
        }