Code Example #1
File: NetworkBuilder.cs Project: pxqr/nanon
 // single layer network
 public static NeuralNetwork<Vector> Create(IDataSet<Vector, Vector> dataSet, IActivator activator)
 {
     var workLayer = new FullyConnectedLayer(dataSet.FirstInput.Size, dataSet.FirstOutput.Size, activator);
     var outputLayer = new OutputLayer<Vector>();
     var layers = new CompositeLayer<Vector, Vector, Vector>(workLayer, outputLayer);
     return new NeuralNetwork<Vector>(layers);
 }
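A minimal usage sketch for the factory above; the concrete IDataSet and IActivator implementations are assumptions, since nanon's own types are not shown here:
 // Hedged usage sketch -- VectorDataSet and Sigmoid stand in for whatever
 // IDataSet<Vector, Vector> and IActivator implementations the project provides.
 IDataSet<Vector, Vector> dataSet = new VectorDataSet("train.dat");
 IActivator activator = new Sigmoid();
 NeuralNetwork<Vector> network = NetworkBuilder.Create(dataSet, activator);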
Code Example #2
        public void Convert()
        {
            var layers = _model.Layer.Select(ConvertLayer).Where(x => x != null).ToList();

            var inputs = new List <InputLayer>(layers.OfType <InputLayer>());

            var outputs = new List <OutputLayer>();
            int i       = 0;

            foreach (var conn in _outputs.Values.Where(o => !o.Connections.Any()))
            {
                var output = new OutputLayer(conn.Dimensions)
                {
                    Name = $"output_{i++}"
                };
                conn.AddConnection(output.Input);
                outputs.Add(output);
            }

            Graph = new Graph(inputs, outputs);
        }
Code Example #3
File: WeightSaveUtil.cs Project: Jock96/Jock.CNN
        /// <summary>
        /// Saves the output layer.
        /// </summary>
        /// <param name="path">Path to save to.</param>
        /// <param name="outputLayer">The output layer.</param>
        private static void OutputLayerSave(string path, OutputLayer outputLayer)
        {
            if (outputLayer != null)
            {
                var neuron          = outputLayer.GetOutputNeuron();
                var directoryToSave = Path.Combine(path, LayersConstants.OUTPUT_LAYER_NAME);

                if (!Directory.Exists(directoryToSave))
                {
                    Directory.CreateDirectory(directoryToSave);
                }

                var fileToSave = Path.Combine(directoryToSave,
                                              $"0{FileConstants.TEXT_EXTENSION}");

                using (var stream = new StreamWriter(fileToSave))
                {
                    neuron.Weights.ForEach(weight => stream.Write(weight + " "));
                }
            }
        }
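For symmetry, a hedged sketch of reading those weights back; the parsing below is an assumption rather than Jock.CNN code, and relies only on the space-separated format written above (assumes System.IO and System.Linq):
        // Hedged counterpart sketch: reads the space-separated weights
        // written by OutputLayerSave from the same file location.
        private static List<double> OutputLayerLoad(string path)
        {
            var file = Path.Combine(path, LayersConstants.OUTPUT_LAYER_NAME,
                                    $"0{FileConstants.TEXT_EXTENSION}");

            return File.ReadAllText(file)
                       .Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries)
                       .Select(double.Parse)
                       .ToList();
        }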
Code Example #4
        public void TrainPerceptron2()
        {
            var rnd = new Random();

            foreach (var layer in OutputLayer)
            {
                foreach (var neuron in layer)
                {
                    for (var index = 0; index < neuron.Weights.Count; index++)
                    {
                        neuron.Weights[index] = rnd.NextDouble();
                    }
                }
            }
            smthV        = new List <double>();
            CurrentDelta = 0;
            var iteration = 1;

            while (iteration < 100 && CurrentDelta < valueSKO) //Until it becomes less than the value
            {
                var indexLayer = 0;
                for (var index = 0; index < OutputLayer[indexLayer].Count; index++)
                {
                    var neuron = OutputLayer[indexLayer][index];
                    CurrentDelta     += TrainSumFunction(indexLayer, index);
                    neuron.valueSmthV = TrainSumFunction(indexLayer, index);
                }

                for (var index = 0; index < OutputLayer.Last().Count; index++)
                {
                    var neuron = OutputLayer.Last()[index];
                    TrainSumFunctionF2(neuron, OutputLayer.Count - 2, iteration);
                    neuron.valueOutputY1 = 1 - neuron.valueOutputY1;
                }

                CurrentDelta = Math.Abs(CurrentDelta / (OutputLayer.Count * OutputLayer[0].Count));

                iteration++;
            }
        }
Code Example #5
        public override void SaveModel(string filename)
        {
            //Save meta data
            using (StreamWriter sw = new StreamWriter(filename))
            {
                BinaryWriter fo = new BinaryWriter(sw.BaseStream);

                if (forwardHiddenLayers[0] is BPTTLayer)
                {
                    fo.Write(0);
                }
                else
                {
                    fo.Write(1);
                }

                fo.Write((int)ModelDirection);
                fo.Write(IsCRFTraining);

                fo.Write(forwardHiddenLayers.Count);
                //Save forward layers
                foreach (SimpleLayer layer in forwardHiddenLayers)
                {
                    layer.Save(fo);
                }
                //Save backward layers
                foreach (SimpleLayer layer in backwardHiddenLayers)
                {
                    layer.Save(fo);
                }
                //Save output layer
                OutputLayer.Save(fo);

                if (IsCRFTraining == true)
                {
                    // Save CRF features weights
                    RNNHelper.SaveMatrix(CRFTagTransWeights, fo);
                }
            }
        }
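A loader has to consume these fields in exactly the order SaveModel wrote them; a minimal sketch of the read side (the trailing Load routines are assumptions, borrowed from the pattern in Code Example #18):
        // Hedged sketch: mirrors the write order of SaveModel above.
        using (StreamReader sr = new StreamReader(filename))
        {
            BinaryReader br = new BinaryReader(sr.BaseStream);

            bool firstLayerIsBPTT = br.ReadInt32() == 0; // 0 was written for a BPTTLayer
            int  modelDirection   = br.ReadInt32();
            bool isCRFTraining    = br.ReadBoolean();
            int  layerCount       = br.ReadInt32();

            // ...then layerCount forward layers, the backward layers, the output layer,
            // and, if isCRFTraining, the CRF transition matrix -- each via its matching
            // Load routine.
        }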
Code Example #6
        // save model as binary format
        public override void SaveModel(string filename)
        {
            StreamWriter sw = new StreamWriter(filename);
            BinaryWriter fo = new BinaryWriter(sw.BaseStream);

            if (HiddenLayerList[0] is BPTTLayer)
            {
                fo.Write(0);
            }
            else
            {
                fo.Write(1);
            }

            fo.Write((int)ModelDirection);

            // Signature: 0 is for RNN, 1 is for RNN-CRF
            int iflag = 0;

            if (IsCRFTraining == true)
            {
                iflag = 1;
            }
            fo.Write(iflag);

            fo.Write(HiddenLayerList.Count);
            foreach (SimpleLayer layer in HiddenLayerList)
            {
                layer.Save(fo);
            }
            OutputLayer.Save(fo);

            if (iflag == 1)
            {
                // Save Bigram
                RNNHelper.SaveMatrix(CRFTagTransWeights, fo);
            }

            fo.Close();
        }
Code Example #7
        /// <summary>
        /// Constructor taking the neural network training coefficient
        /// </summary>
        public NeuralNetwork(int TrainigCoof)
        {
            First = new InputLayer(6);

            Second = new HiddenLayer(5);

            Third = new HiddenLayer(4);

            Forth = new HiddenLayer(2);

            Fifth = new OutputLayer(4);

            SFirst = new SynapseLayer(6, 5);

            SSecond = new SynapseLayer(5, 4);

            SThird = new SynapseLayer(4, 2);

            SFourth = new SynapseLayer(2, 4);

            this.TrainigCoof = TrainigCoof;
        }
Code Example #8
        ///<summary>
        /// Constructor for inheritance
        /// </summary>
        public NeuralNetwork(int TrainigCoof, double [,] SFirst, double [,] SSecond, double[,] SThird, double[,] SFourth)
        {
            First = new InputLayer(6);

            Second = new HiddenLayer(5);

            Third = new HiddenLayer(4);

            Forth = new HiddenLayer(2);

            Fifth = new OutputLayer(4);

            this.SFirst = new SynapseLayer(SFirst, TrainigCoof);

            this.SSecond = new SynapseLayer(SSecond, TrainigCoof);

            this.SThird = new SynapseLayer(SThird, TrainigCoof);

            this.SFourth = new SynapseLayer(SFourth, TrainigCoof);

            this.TrainigCoof = TrainigCoof;
        }
Code Example #9
        ///<summary>
        ///Constructor for inheriting the neural network
        ///</summary>
        public NeuralNetwork(NeuralNetwork BaseNet)
        {
            First = new InputLayer(6);

            Second = new HiddenLayer(5);

            Third = new HiddenLayer(4);

            Forth = new HiddenLayer(2);

            Fifth = new OutputLayer(4);

            TrainigCoof = BaseNet.TRAING_COOF;

            SFirst = new SynapseLayer(BaseNet.S_FIRST, BaseNet.TRAING_COOF);

            SSecond = new SynapseLayer(BaseNet.S_CECOND, BaseNet.TRAING_COOF);

            SThird = new SynapseLayer(BaseNet.S_THIRD, BaseNet.TRAING_COOF);

            SFourth = new SynapseLayer(BaseNet.S_FORTH, BaseNet.TRAING_COOF);
        }
Code Example #10
File: Program.cs Project: xiaoxiongnpu/Sigma
        /// <summary>
        /// Create an IRIS trainer that observes the current epoch and iteration
        /// </summary>
        /// <param name="sigma">The sigma environment.</param>
        /// <returns>The newly created trainer that can be added to the environment.</returns>
        private static ITrainer CreateIrisTrainer(SigmaEnvironment sigma)
        {
            CsvRecordReader  irisReader    = new CsvRecordReader(new MultiSource(new FileSource("iris.data"), new UrlSource("http://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data")));
            IRecordExtractor irisExtractor = irisReader.Extractor("inputs", new[] { 0, 3 }, "targets", 4).AddValueMapping(4, "Iris-setosa", "Iris-versicolor", "Iris-virginica");

            irisExtractor = irisExtractor.Preprocess(new OneHotPreprocessor(sectionName: "targets", minValue: 0, maxValue: 2));
            irisExtractor = irisExtractor.Preprocess(new PerIndexNormalisingPreprocessor(0, 1, "inputs", 0, 4.3, 7.9, 1, 2.0, 4.4, 2, 1.0, 6.9, 3, 0.1, 2.5));

            Dataset  dataset           = new Dataset("iris", Dataset.BlockSizeAuto, irisExtractor);
            IDataset trainingDataset   = dataset;
            IDataset validationDataset = dataset;

            ITrainer trainer = sigma.CreateTrainer("test");

            trainer.Network = new Network
            {
                Architecture = InputLayer.Construct(4)
                               + FullyConnectedLayer.Construct(10)
                               + FullyConnectedLayer.Construct(20)
                               + FullyConnectedLayer.Construct(10)
                               + FullyConnectedLayer.Construct(3)
                               + OutputLayer.Construct(3)
                               + SquaredDifferenceCostLayer.Construct()
            };
            trainer.TrainingDataIterator = new MinibatchIterator(4, trainingDataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(validationDataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.002);
            trainer.Operator  = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.4));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.01, mean: 0.05));

            trainer.AddHook(new ValueReporterHook("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new ValidationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: 1));
            trainer.AddLocalHook(new CurrentEpochIterationReporter(TimeStep.Every(1, TimeScale.Epoch)));

            return(trainer);
        }
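To run the trainer, the surrounding program would create the environment and start it; a hedged sketch following Sigma's usual entry-point pattern (treat Create and PrepareAndRun as assumptions relative to this snippet):
        // Hedged usage sketch -- not shown in the Program.cs excerpt above.
        SigmaEnvironment sigma = SigmaEnvironment.Create("iris");
        ITrainer trainer = CreateIrisTrainer(sigma);
        sigma.PrepareAndRun();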
Code Example #11
        public void TrainPerceptron(List <List <double> > neuralData, double[] answersNumberOfClass, int iterations,
                                    double learningRate = 0.1)
        {
            var epoch = 1;

            while (iterations >= epoch)
            {
                var inputLayer = OutputLayer[0];
                var outputs    = new List <double>();

                for (var i = 0; i < neuralData.Count; i++)
                {
                    for (var j = 0; j < neuralData[i].Count; j++)
                    {
                        inputLayer[j].outputPulse = neuralData[i][j];
                    }

                    ComputeOutput();
                    outputs.Add(OutputLayer.Last().First().outputPulse);
                }

                double accuracySum = 0;
                var    y_counter   = 0;
                outputs.ForEach(x =>
                {
                    if (x == answersNumberOfClass[y_counter])
                    {
                        accuracySum++;
                    }

                    y_counter++;
                });

                //Optimize the synaptic weights
                OptimizeWeights(accuracySum / y_counter);
                epoch++;
            }
        }
Code Example #12
File: LearningUtil.cs Project: Jock96/ANN_Lab1
        /// <summary>
        /// Update the weights without a hidden layer.
        /// </summary>
        /// <param name="outputLayer">The output layer.</param>
        /// <param name="iterationIndex">Iteration index.</param>
        private void UpdateWeightsWithouHiddenLayer(OutputLayer outputLayer, int iterationIndex)
        {
            var outputOfNeuron = outputLayer.GetOutput;

            var deltaOfOutputNeuron = GetOutputLayerNeuronDelta(outputOfNeuron);

            var currentInputs = _dataSets[iterationIndex];
            var gradients     = new List <double>();

            foreach (var input in currentInputs)
            {
                var gradient = input * deltaOfOutputNeuron;
                gradients.Add(gradient);
            }

            var outputWeights = outputLayer.GetNeuron.Weights;
            var weightIndex   = 0;

            var weightsDelta = new List <double>();

            foreach (var weight in outputWeights)
            {
                var weightDelta = _configuration.Epsilon * gradients[weightIndex] +
                                  _configuration.Alpha * outputLayer.GetNeuron.LastWeights[weightIndex];

                weightsDelta.Add(weightDelta);
                weightIndex++;
            }

            var newWeights = new List <double>();

            for (var i = 0; i < outputLayer.GetNeuron.Weights.Count; ++i)
            {
                newWeights.Add(outputLayer.GetNeuron.Weights[i] + weightsDelta[i]);
            }

            outputLayer.UpdateWeightsOfNeuronInLayer(newWeights);
        }
Code Example #13
        public IList <float> EvaluateOne(float[] input)
        {
            var inputVar  = InputLayer.InputVariable;
            var outputVar = OutputLayer.GetOutputVariable();


            Value inputdata = Value.CreateBatch(inputVar.Shape, input, Device, true);


            var inputDataMap = new Dictionary <Variable, Value>()
            {
                { inputVar, inputdata }
            };
            var outputDataMap = new Dictionary <Variable, Value>()
            {
                { outputVar, null }
            };


            outputVar.ToFunction().Evaluate(inputDataMap, outputDataMap, Device);
            var result = outputDataMap[outputVar].GetDenseData <float>(outputVar);

            return(result[0]);
        }
Code Example #14
        public override RNN <T> Clone()
        {
            List <SimpleLayer> forwardLayers  = new List <SimpleLayer>();
            List <SimpleLayer> backwardLayers = new List <SimpleLayer>();

            foreach (SimpleLayer layer in forwardHiddenLayers)
            {
                forwardLayers.Add(layer.CreateLayerSharedWegiths());
            }

            foreach (SimpleLayer layer in backwardHiddenLayers)
            {
                backwardLayers.Add(layer.CreateLayerSharedWegiths());
            }

            BiRNNAvg <T> rnn = new BiRNNAvg <T>();

            rnn.InitCache(forwardLayers, backwardLayers, OutputLayer.CreateLayerSharedWegiths());
            rnn.CRFTagTransWeights = CRFTagTransWeights;
            rnn.MaxSeqLength       = MaxSeqLength;
            rnn.crfLocker          = crfLocker;

            return(rnn);
        }
Code Example #15
        // save model as binary format
        public override void SaveModel(string filename)
        {
            var sw = new StreamWriter(filename);
            var fo = new BinaryWriter(sw.BaseStream);

            fo.Write(IsCRFTraining);
            fo.Write(HiddenLayerList.Count);
            foreach (var layer in HiddenLayerList)
            {
                fo.Write((int)layer.LayerType);
                layer.Save(fo);
            }

            fo.Write((int)OutputLayer.LayerType);
            OutputLayer.Save(fo);

            if (IsCRFTraining)
            {
                //Save CRF feature weights
                RNNHelper.SaveMatrix(CRFWeights, fo);
            }

            fo.Close();
        }
Code Example #16
        public override RNN <T> Clone()
        {
            List <SimpleLayer> hiddenLayers = new List <SimpleLayer>();

            foreach (SimpleLayer layer in HiddenLayerList)
            {
                hiddenLayers.Add(layer.CreateLayerSharedWegiths());
            }

            ForwardRNN <T> rnn = new ForwardRNN <T>();

            rnn.HiddenLayerList = hiddenLayers;
            rnn.OutputLayer     = OutputLayer.CreateLayerSharedWegiths();
            rnn.CRFWeights      = CRFWeights;
            rnn.MaxSeqLength    = MaxSeqLength;
            rnn.bVQ             = bVQ;
            rnn.IsCRFTraining   = IsCRFTraining;
            if (rnn.IsCRFTraining)
            {
                rnn.InitializeCRFVariablesForTraining();
            }

            return(rnn);
        }
Code Example #17
        static void Xor()
        {
            const int batchSize = 4;
            const int epochSize = 16;

            var inputLayer  = new InputLayer3D(1, 1, 1);
            var outputLayer = new OutputLayer(1)
            {
                ActivationFunction = new ConstOutputArrayFunction()
            };
            var dataProvider = new FunctionProvider
            {
                TrainData =
                {
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(0, 0), Expected = new Array3D(0.0)
                    },
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(0, 1), Expected = new Array3D(1.0)
                    },
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(1, 0), Expected = new Array3D(1.0)
                    },
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(1, 1), Expected = new Array3D(0.0)
                    }
                },
                TestData = { new TrainingData <Array3D, Array3D>
                             {
                                 Input = new Array3D(0, 0), Expected = new Array3D(0)
                             },
                             new TrainingData <Array3D, Array3D>
                             {
                                 Input = new Array3D(0, 1), Expected = new Array3D(1)
                             },
                             new TrainingData <Array3D, Array3D>
                             {
                                 Input = new Array3D(1, 0), Expected = new Array3D(1)
                             },
                             new TrainingData <Array3D, Array3D>
                             {
                                 Input = new Array3D(1, 1), Expected = new Array3D(0)
                             } },
                IsQueue = false
            };
            var oneData = new FunctionProvider {
                TrainData =
                {
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(0, 0), Expected = new Array3D(0.0)
                    },
                    new TrainingData <Array3D, Array3D>
                    {
                        Input = new Array3D(1, 1), Expected = new Array3D(0.0)
                    }
                }, IsQueue = false
            };
            var function = new FunctionProvider(x => Math.Pow(x, 2));

            var weight1 = new List <Array3D> {
                new Array3D(0.1, 0.3), new Array3D(0.3, 0.1)
            };
            var weight2 = new List <Array3D> {
                new Array3D(0.4, 0.5)
            };
            var perceptron1 = new PerceptronLayer(5, 2)
            {
                ActivationFunction = new TanhActivationFunction()
            };

            perceptron1.Trainer = new MiniBatchPerceptronTrainer(perceptron1.Neurals, false)
            {
                BatchSize = batchSize, ActivationFunction = new TanhActivationFunction(), LearningRate = 0.1, Momentum = 0.1
            };
            var perceptron2 = new PerceptronLayer(1, 5)
            {
                ActivationFunction = new TanhActivationFunction()
            };

            perceptron2.Trainer = new MiniBatchPerceptronTrainer(perceptron2.Neurals, true)
            {
                BatchSize = batchSize, ActivationFunction = new TanhActivationFunction(), LearningRate = 0.1, Momentum = 0.1
            };

            var network = new MultiLayerPerceptron
            {
                InputLayer   = inputLayer,
                OutputLayer  = outputLayer,
                DataProvider = dataProvider
            };

            network.HiddenLayers.Add(perceptron1);
            network.HiddenLayers.Add(perceptron2);

            var trainer = new FCTrainer(network, epochSize, batchSize, dataProvider);

            trainer.Train(200);
        }
Code Example #18
        public override void LoadModel(string filename, bool bTrain = false)
        {
            Logger.WriteLine(Logger.Level.info, "Loading bi-directional model: {0}", filename);

            using (var sr = new StreamReader(filename))
            {
                var br = new BinaryReader(sr.BaseStream);

                IsCRFTraining = br.ReadBoolean();
                var       layerSize = br.ReadInt32();
                LayerType layerType = LayerType.None;

                //Load forward layers from file
                forwardHiddenLayers = new List <SimpleLayer>();
                for (var i = 0; i < layerSize; i++)
                {
                    layerType = (LayerType)br.ReadInt32();
                    forwardHiddenLayers.Add(Load(layerType, br));

                    SimpleLayer layer = forwardHiddenLayers[forwardHiddenLayers.Count - 1];
                    if (bTrain)
                    {
                        layer.SetRunningMode(RunningMode.Training);
                        layer.InitializeInternalTrainingParameters();
                    }
                    else
                    {
                        layer.SetRunningMode(RunningMode.Test);
                    }
                }

                //Load backward layers from file
                backwardHiddenLayers = new List <SimpleLayer>();
                for (var i = 0; i < layerSize; i++)
                {
                    layerType = (LayerType)br.ReadInt32();
                    backwardHiddenLayers.Add(Load(layerType, br));

                    SimpleLayer layer = backwardHiddenLayers[backwardHiddenLayers.Count - 1];
                    if (bTrain)
                    {
                        layer.SetRunningMode(RunningMode.Training);
                        layer.InitializeInternalTrainingParameters();
                    }
                    else
                    {
                        layer.SetRunningMode(RunningMode.Test);
                    }
                }

                Logger.WriteLine("Create output layer");
                layerType   = (LayerType)br.ReadInt32();
                OutputLayer = Load(layerType, br);

                if (bTrain)
                {
                    OutputLayer.SetRunningMode(RunningMode.Training);
                    OutputLayer.InitializeInternalTrainingParameters();
                }
                else
                {
                    OutputLayer.SetRunningMode(RunningMode.Test);
                }

                if (IsCRFTraining)
                {
                    Logger.WriteLine("Loading CRF tag trans weights...");
                    CRFWeights = RNNHelper.LoadMatrix(br);
                }

                if (bTrain)
                {
                    InitCache(forwardHiddenLayers, backwardHiddenLayers, OutputLayer.CreateLayerSharedWegiths());
                }
            }
        }
Code Example #19
        public override int[] ProcessSequence(ISequence sequence, RunningMode runningMode, bool outputRawScore, out Matrix <float> m)
        {
            Sequence pSequence = sequence as Sequence;

            var numStates = pSequence.States.Length;
            var numLayers = HiddenLayerList.Count;

            m = outputRawScore ? new Matrix <float>(numStates, OutputLayer.LayerSize) : null;

            var predicted  = new int[numStates];
            var isTraining = runningMode == RunningMode.Training;

            //reset all layers
            foreach (var layer in HiddenLayerList)
            {
                layer.Reset();
            }

            //Set current sentence labels into short list in output layer
            OutputLayer.LabelShortList.Clear();
            foreach (var state in pSequence.States)
            {
                OutputLayer.LabelShortList.Add(state.Label);
            }

            for (var curState = 0; curState < numStates; curState++)
            {
                //Compute first layer
                var state = pSequence.States[curState];
                SetRuntimeFeatures(state, curState, numStates, predicted);
                HiddenLayerList[0].ForwardPass(state.SparseFeature, state.DenseFeature.CopyTo());

                //Compute each layer
                for (var i = 1; i < numLayers; i++)
                {
                    //We use previous layer's output as dense feature for current layer
                    HiddenLayerList[i].ForwardPass(state.SparseFeature, HiddenLayerList[i - 1].Cells);
                }

                //Compute output layer
                OutputLayer.ForwardPass(state.SparseFeature, HiddenLayerList[numLayers - 1].Cells);

                if (m != null)
                {
                    OutputLayer.Cells.CopyTo(m[curState], 0);
                }

                predicted[curState] = OutputLayer.GetBestOutputIndex();

                if (runningMode == RunningMode.Training)
                {
                    // error propagation
                    OutputLayer.ComputeLayerErr(CRFSeqOutput, state, curState);

                    //propagate errors to each layer from output layer to input layer
                    HiddenLayerList[numLayers - 1].ComputeLayerErr(OutputLayer);
                    for (var i = numLayers - 2; i >= 0; i--)
                    {
                        HiddenLayerList[i].ComputeLayerErr(HiddenLayerList[i + 1]);
                    }

                    //Update net weights
                    OutputLayer.BackwardPass();

                    for (var i = 0; i < numLayers; i++)
                    {
                        HiddenLayerList[i].BackwardPass();
                    }
                }
            }

            return(predicted);
        }
Code Example #20
        public override int[] ProcessSequenceCRF(Sequence pSequence, RunningMode runningMode)
        {
            var numStates = pSequence.States.Length;
            var numLayers = HiddenLayerList.Count;

            //Get network output without CRF
            Matrix <float> nnOutput;

            ProcessSequence(pSequence, RunningMode.Test, true, out nnOutput);

            //Compute CRF result
            ForwardBackward(numStates, nnOutput);

            //Compute best path in CRF result
            var predicted = Viterbi(nnOutput, numStates);

            if (runningMode == RunningMode.Training)
            {
                //Update tag bigram transition for CRF model
                UpdateBigramTransition(pSequence);

                //Reset all layer states
                foreach (var layer in HiddenLayerList)
                {
                    layer.Reset();
                }

                for (var curState = 0; curState < numStates; curState++)
                {
                    // error propagation
                    var state = pSequence.States[curState];
                    SetRuntimeFeatures(state, curState, numStates, null);
                    HiddenLayerList[0].SetRunningMode(runningMode);
                    HiddenLayerList[0].ForwardPass(state.SparseFeature, state.DenseFeature.CopyTo());

                    for (var i = 1; i < numLayers; i++)
                    {
                        HiddenLayerList[i].SetRunningMode(runningMode);
                        HiddenLayerList[i].ForwardPass(state.SparseFeature, HiddenLayerList[i - 1].Cells);
                    }

                    OutputLayer.ComputeLayerErr(CRFSeqOutput, state, curState);

                    HiddenLayerList[numLayers - 1].ComputeLayerErr(OutputLayer);
                    for (var i = numLayers - 2; i >= 0; i--)
                    {
                        HiddenLayerList[i].ComputeLayerErr(HiddenLayerList[i + 1]);
                    }

                    //Update net weights
                    OutputLayer.BackwardPass();

                    for (var i = 0; i < numLayers; i++)
                    {
                        HiddenLayerList[i].BackwardPass();
                    }
                }
            }

            return(predicted);
        }
Code Example #21
        public override int[] ProcessSequence(Sequence pSequence, RunningMode runningMode, bool outputRawScore, out Matrix <double> m)
        {
            int numStates = pSequence.States.Length;
            int numLayers = HiddenLayerList.Count;

            if (outputRawScore == true)
            {
                m = new Matrix <double>(numStates, OutputLayer.LayerSize);
            }
            else
            {
                m = null;
            }

            int[] predicted  = new int[numStates];
            bool  isTraining = runningMode == RunningMode.Training;

            //reset all layers
            foreach (SimpleLayer layer in HiddenLayerList)
            {
                layer.netReset(isTraining);
            }

            for (int curState = 0; curState < numStates; curState++)
            {
                //Compute first layer
                State state = pSequence.States[curState];
                SetInputLayer(state, curState, numStates, predicted);
                HiddenLayerList[0].computeLayer(state.SparseData, state.DenseData.CopyTo(), isTraining);

                //Compute each layer
                for (int i = 1; i < numLayers; i++)
                {
                    //We use previous layer's output as dense feature for current layer
                    HiddenLayerList[i].computeLayer(state.SparseData, HiddenLayerList[i - 1].cellOutput, isTraining);
                }

                //Compute output layer
                OutputLayer.CurrentLabelId = state.Label;
                OutputLayer.computeLayer(state.SparseData, HiddenLayerList[numLayers - 1].cellOutput, isTraining);

                if (m != null)
                {
                    OutputLayer.cellOutput.CopyTo(m[curState], 0);
                }

                OutputLayer.Softmax(isTraining);

                predicted[curState] = OutputLayer.GetBestOutputIndex(isTraining);

                if (runningMode != RunningMode.Test)
                {
                    logp += Math.Log10(OutputLayer.cellOutput[state.Label] + 0.0001);
                }

                if (runningMode == RunningMode.Training)
                {
                    // error propagation
                    OutputLayer.ComputeLayerErr(CRFSeqOutput, state, curState);

                    //propagate errors to each layer from output layer to input layer
                    HiddenLayerList[numLayers - 1].ComputeLayerErr(OutputLayer);
                    for (int i = numLayers - 2; i >= 0; i--)
                    {
                        HiddenLayerList[i].ComputeLayerErr(HiddenLayerList[i + 1]);
                    }

                    //Update net weights
                    Parallel.Invoke(() =>
                    {
                        OutputLayer.LearnFeatureWeights(numStates, curState);
                    },
                                    () =>
                    {
                        Parallel.For(0, numLayers, parallelOption, i =>
                        {
                            HiddenLayerList[i].LearnFeatureWeights(numStates, curState);
                        });
                    });
                }
            }

            return(predicted);
        }
Code Example #22
        public override int[] TestSeq2Seq(Sentence srcSentence, Config featurizer)
        {
            var curState = featurizer.BuildState(new[] { "<s>" });

            curState.Label = featurizer.TagSet.GetIndex("<s>");

            //Reset all layers
            foreach (var layer in HiddenLayerList)
            {
                layer.Reset(false);
            }

            //Extract features from source sentence
            var srcSequence = featurizer.Seq2SeqAutoEncoder.Config.BuildSequence(srcSentence);

            float[] srcHiddenAvgOutput;
            Dictionary <int, float> srcSparseFeatures;

            ExtractSourceSentenceFeature(featurizer.Seq2SeqAutoEncoder, srcSequence, curState.SparseFeature.Length,
                                         out srcHiddenAvgOutput, out srcSparseFeatures);

            var numLayers = HiddenLayerList.Count;
            var predicted = new List <int> {
                curState.Label
            };

            while (true)
            {
                //Build sparse features
                var sparseVector = new SparseVector();
                sparseVector.SetLength(curState.SparseFeature.Length + srcSequence.SparseFeatureSize);
                sparseVector.AddKeyValuePairData(curState.SparseFeature);
                sparseVector.AddKeyValuePairData(srcSparseFeatures);

                //Compute first layer
                var denseFeatures = RNNHelper.ConcatenateVector(curState.DenseFeature, srcHiddenAvgOutput);
                HiddenLayerList[0].ForwardPass(sparseVector, denseFeatures, false);

                //Compute middle layers
                for (var i = 1; i < numLayers; i++)
                {
                    //We use previous layer's output as dense feature for current layer
                    denseFeatures = RNNHelper.ConcatenateVector(HiddenLayerList[i - 1].Cell, srcHiddenAvgOutput);
                    HiddenLayerList[i].ForwardPass(sparseVector, denseFeatures, false);
                }

                //Compute output layer
                denseFeatures = RNNHelper.ConcatenateVector(HiddenLayerList[numLayers - 1].Cell,
                                                            srcHiddenAvgOutput);
                OutputLayer.ForwardPass(sparseVector, denseFeatures, false);

                OutputLayer.Softmax(false);

                var nextTagId = OutputLayer.GetBestOutputIndex(false);
                var nextWord  = featurizer.TagSet.GetTagName(nextTagId);

                curState       = featurizer.BuildState(new[] { nextWord });
                curState.Label = nextTagId;

                predicted.Add(nextTagId);

                if (nextWord == "</s>" || predicted.Count >= 100)
                {
                    break;
                }
            }

            return(predicted.ToArray());
        }
Code Example #23
 //Sends the inputs once through the network and returns the output
 public double[] Compute(params double[] inputs)
 {
     Train(inputs);
     return OutputLayer.Select(a => a.Value).ToArray();
 }
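A hedged usage sketch; the constructor shape is an assumption since only Compute is shown, and note that Compute delegates the forward pass to Train(inputs):
 // Hedged usage sketch -- the constructor arguments are assumptions.
 var network = new NeuralNetwork(inputCount: 2, hiddenCount: 3, outputCount: 1);
 double[] result = network.Compute(0.25, 0.75); // runs one forward pass via Train(inputs)
 Console.WriteLine(string.Join(", ", result));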
Code Example #24
        public override int[] ProcessSequenceCRF(Sequence pSequence, RunningMode runningMode)
        {
            int numStates = pSequence.States.Length;
            int numLayers = HiddenLayerList.Count;

            //Get network output without CRF
            Matrix <double> nnOutput;

            ProcessSequence(pSequence, RunningMode.Test, true, out nnOutput);

            //Compute CRF result
            ForwardBackward(numStates, nnOutput);

            if (runningMode != RunningMode.Test)
            {
                //Get the best result
                for (int i = 0; i < numStates; i++)
                {
                    logp += Math.Log10(CRFSeqOutput[i][pSequence.States[i].Label] + 0.0001);
                }
            }

            //Compute best path in CRF result
            int[] predicted = Viterbi(nnOutput, numStates);

            if (runningMode == RunningMode.Training)
            {
                //Update tag bigram transition for CRF model
                UpdateBigramTransition(pSequence);

                //Reset all layer states
                foreach (SimpleLayer layer in HiddenLayerList)
                {
                    layer.netReset(true);
                }

                for (int curState = 0; curState < numStates; curState++)
                {
                    // error propagation
                    State state = pSequence.States[curState];
                    SetInputLayer(state, curState, numStates, null);
                    HiddenLayerList[0].computeLayer(state.SparseData, state.DenseData.CopyTo());

                    for (int i = 1; i < numLayers; i++)
                    {
                        HiddenLayerList[i].computeLayer(state.SparseData, HiddenLayerList[i - 1].cellOutput);
                    }

                    OutputLayer.ComputeLayerErr(CRFSeqOutput, state, curState);

                    HiddenLayerList[numLayers - 1].ComputeLayerErr(OutputLayer);
                    for (int i = numLayers - 2; i >= 0; i--)
                    {
                        HiddenLayerList[i].ComputeLayerErr(HiddenLayerList[i + 1]);
                    }

                    //Update net weights
                    Parallel.Invoke(() =>
                    {
                        OutputLayer.LearnFeatureWeights(numStates, curState);
                    },
                                    () =>
                    {
                        Parallel.For(0, numLayers, parallelOption, i =>
                        {
                            HiddenLayerList[i].LearnFeatureWeights(numStates, curState);
                        });
                    });
                }
            }

            return(predicted);
        }
Code Example #25
        private int[] TrainSequencePair(ISequence sequence, RunningMode runningMode, bool outputRawScore, out Matrix <float> m)
        {
            SequencePair pSequence   = sequence as SequencePair;
            var          tgtSequence = pSequence.tgtSequence;

            //Reset all layers
            foreach (var layer in HiddenLayerList)
            {
                layer.Reset();
            }

            Sequence srcSequence;

            //Extract features from source sentences
            srcSequence = pSequence.autoEncoder.Config.BuildSequence(pSequence.srcSentence);
            List <float[]> srcDenseFeatureGorups = new List <float[]>();
            SparseVector   srcSparseFeatures     = new SparseVector();

            ExtractSourceSentenceFeature(pSequence.autoEncoder, srcSequence, tgtSequence.SparseFeatureSize, srcDenseFeatureGorups, srcSparseFeatures);

            var numStates = pSequence.tgtSequence.States.Length;
            var numLayers = HiddenLayerList.Count;
            var predicted = new int[numStates];

            m = outputRawScore ? new Matrix <float>(numStates, OutputLayer.LayerSize) : null;

            //Set target sentence labels into short list in output layer
            OutputLayer.LabelShortList.Clear();
            foreach (var state in tgtSequence.States)
            {
                OutputLayer.LabelShortList.Add(state.Label);
            }

            //Set sparse feature group from source sequence
            sparseFeatureGorups.Clear();
            sparseFeatureGorups.Add(srcSparseFeatures);
            sparseFeatureGorups.Add(null);
            int targetSparseFeatureIndex = sparseFeatureGorups.Count - 1;

            //Set dense feature groups from source sequence
            for (var i = 0; i < numLayers; i++)
            {
                denseFeatureGroupsList[i].Clear();
                denseFeatureGroupsList[i].AddRange(srcDenseFeatureGorups);
                denseFeatureGroupsList[i].Add(null);
            }
            denseFeatureGroupsOutputLayer.Clear();
            denseFeatureGroupsOutputLayer.AddRange(srcDenseFeatureGorups);
            denseFeatureGroupsOutputLayer.Add(null);
            int targetDenseFeatureIndex = denseFeatureGroupsOutputLayer.Count - 1;

            for (var curState = 0; curState < numStates; curState++)
            {
                var state = tgtSequence.States[curState];

                //Set sparse feature groups
                sparseFeatureGorups[targetSparseFeatureIndex] = state.SparseFeature;

                //Compute first layer
                denseFeatureGroupsList[0][targetDenseFeatureIndex] = state.DenseFeature.CopyTo();
                HiddenLayerList[0].ForwardPass(sparseFeatureGorups, denseFeatureGroupsList[0]);

                //Compute middle layers
                for (var i = 1; i < numLayers; i++)
                {
                    //We use previous layer's output as dense feature for current layer
                    denseFeatureGroupsList[i][targetDenseFeatureIndex] = HiddenLayerList[i - 1].Cells;
                    HiddenLayerList[i].ForwardPass(sparseFeatureGorups, denseFeatureGroupsList[i]);
                }

                //Compute output layer
                denseFeatureGroupsOutputLayer[targetDenseFeatureIndex] = HiddenLayerList[numLayers - 1].Cells;
                OutputLayer.ForwardPass(sparseFeatureGorups, denseFeatureGroupsOutputLayer);

                if (m != null)
                {
                    OutputLayer.Cells.CopyTo(m[curState], 0);
                }

                predicted[curState] = OutputLayer.GetBestOutputIndex();

                if (runningMode == RunningMode.Training)
                {
                    // error propagation
                    OutputLayer.ComputeLayerErr(CRFSeqOutput, state, curState);

                    //propagate errors to each layer from output layer to input layer
                    HiddenLayerList[numLayers - 1].ComputeLayerErr(OutputLayer);
                    for (var i = numLayers - 2; i >= 0; i--)
                    {
                        HiddenLayerList[i].ComputeLayerErr(HiddenLayerList[i + 1]);
                    }

                    //Update net weights
                    OutputLayer.BackwardPass();
                    for (var i = 0; i < numLayers; i++)
                    {
                        HiddenLayerList[i].BackwardPass();
                    }
                }
            }

            return(predicted);
        }
Code Example #26
        private int[] PredictTargetSentence(Sentence sentence, Config featurizer, out Matrix <float> m)
        {
            m = null;

            var curState = featurizer.BuildState(new[] { "<s>" });

            curState.Label = featurizer.TagSet.GetIndex("<s>");

            //Reset all layers
            foreach (var layer in HiddenLayerList)
            {
                layer.Reset();
            }

            //Extract features from source sentence
            var            srcSequence           = featurizer.Seq2SeqAutoEncoder.Config.BuildSequence(sentence);
            List <float[]> srcDenseFeatureGorups = new List <float[]>();
            SparseVector   srcSparseFeatures     = new SparseVector();

            ExtractSourceSentenceFeature(featurizer.Seq2SeqAutoEncoder, srcSequence, curState.SparseFeature.Length, srcDenseFeatureGorups, srcSparseFeatures);

            var numLayers = HiddenLayerList.Count;
            var predicted = new List <int> {
                curState.Label
            };

            //Set sparse feature group from source sequence
            sparseFeatureGorups.Clear();
            sparseFeatureGorups.Add(srcSparseFeatures);
            sparseFeatureGorups.Add(null);
            int targetSparseFeatureIndex = sparseFeatureGorups.Count - 1;

            //Set dense feature groups from source sequence
            for (var i = 0; i < numLayers; i++)
            {
                denseFeatureGroupsList[i].Clear();
                denseFeatureGroupsList[i].AddRange(srcDenseFeatureGorups);
                denseFeatureGroupsList[i].Add(null);
            }
            denseFeatureGroupsOutputLayer.Clear();
            denseFeatureGroupsOutputLayer.AddRange(srcDenseFeatureGorups);
            denseFeatureGroupsOutputLayer.Add(null);
            int targetDenseFeatureIndex = denseFeatureGroupsOutputLayer.Count - 1;

            while (true)
            {
                //Set sparse feature groups
                sparseFeatureGorups[targetSparseFeatureIndex] = curState.SparseFeature;

                //Compute first layer
                denseFeatureGroupsList[0][targetDenseFeatureIndex] = curState.DenseFeature.CopyTo();
                HiddenLayerList[0].ForwardPass(sparseFeatureGorups, denseFeatureGroupsList[0]);

                //Compute middle layers
                for (var i = 1; i < numLayers; i++)
                {
                    //We use previous layer's output as dense feature for current layer
                    denseFeatureGroupsList[i][targetDenseFeatureIndex] = HiddenLayerList[i - 1].Cells;
                    HiddenLayerList[i].ForwardPass(sparseFeatureGorups, denseFeatureGroupsList[i]);
                }

                //Compute output layer
                denseFeatureGroupsOutputLayer[targetDenseFeatureIndex] = HiddenLayerList[numLayers - 1].Cells;
                OutputLayer.ForwardPass(sparseFeatureGorups, denseFeatureGroupsOutputLayer);


                var nextTagId = OutputLayer.GetBestOutputIndex();
                var nextWord  = featurizer.TagSet.GetTagName(nextTagId);

                curState       = featurizer.BuildState(new[] { nextWord });
                curState.Label = nextTagId;

                predicted.Add(nextTagId);

                if (nextWord == "</s>" || predicted.Count >= 100)
                {
                    break;
                }
            }

            return(predicted.ToArray());
        }
Code Example #27
    public void MutateNodes()
    {
        List <NeuralGeneNode> tmpNodesNoinputsList = new List <NeuralGeneNode>();

        foreach (NeuralGeneNode node in HiddenLayers)
        {
            tmpNodesNoinputsList.Add(node);
        }

        foreach (NeuralGeneNode node in OutputLayer)
        {
            tmpNodesNoinputsList.Add(node);
        }

        NeuralGeneNode tmpHiddenNode = AddHiddenNode(neuralActivationFunctions[1]);

        List <NeuralGeneConnection> possibleConnectionsList = new List <NeuralGeneConnection>();

        foreach (var node in tmpNodesNoinputsList)
        {
            foreach (var connection in node.inputSynapses)
            {
                if ((connection.outputNeuron.nodeNumber > tmpHiddenNode.nodeNumber || OutputLayer.Contains(connection.outputNeuron)) && connection.connectionIsEnabled)
                {
                    possibleConnectionsList.Add(connection);
                }
            }
        }

        int nodeConnectionIndex = UnityEngine.Random.Range(0, possibleConnectionsList.Count);

        possibleConnectionsList[nodeConnectionIndex].connectionIsEnabled = false;

        AddConnection(possibleConnectionsList[nodeConnectionIndex].inputNeuron, tmpHiddenNode, true);
        AddConnection(tmpHiddenNode, possibleConnectionsList[nodeConnectionIndex].outputNeuron, true);
    }
Code Example #28
        protected int[] PredictTargetSentence(Sentence sentence, Config featurizer, out Matrix <float> m)
        {
            m = null;

            var curState = featurizer.BuildState(new[] { "<s>" });

            curState.Label = featurizer.TagSet.GetIndex("<s>");

            //Reset all layers
            foreach (var layer in HiddenLayerList)
            {
                layer.Reset();
            }

            //Extract features from source sentence
            var srcSequence = featurizer.Seq2SeqAutoEncoder.Config.BuildSequence(sentence);

            ExtractSourceSentenceFeature(featurizer.Seq2SeqAutoEncoder, srcSequence, curState.SparseFeature.Length);

            var numLayers = HiddenLayerList.Count;
            var predicted = new List <int> {
                curState.Label
            };

            CreateDenseFeatureList();
            for (int i = 0; i < numLayers; i++)
            {
                srcHiddenAvgOutput.CopyTo(denseFeaturesList[i], 0);
            }
            srcHiddenAvgOutput.CopyTo(denseFeaturesList[numLayers], 0);

            var sparseVector = new SparseVector();

            while (true)
            {
                //Build sparse features
                sparseVector.Clean();
                sparseVector.SetLength(curState.SparseFeature.Length + srcSequence.SparseFeatureSize);
                sparseVector.AddKeyValuePairData(curState.SparseFeature);
                sparseVector.AddKeyValuePairData(srcSparseFeatures);

                //Compute first layer
                curState.DenseFeature.CopyTo().CopyTo(denseFeaturesList[0], srcHiddenAvgOutput.Length);
                HiddenLayerList[0].ForwardPass(sparseVector, denseFeaturesList[0]);

                //Compute middle layers
                for (var i = 1; i < numLayers; i++)
                {
                    //We use previous layer's output as dense feature for current layer
                    HiddenLayerList[i - 1].Cells.CopyTo(denseFeaturesList[i], srcHiddenAvgOutput.Length);
                    HiddenLayerList[i].ForwardPass(sparseVector, denseFeaturesList[i]);
                }

                //Compute output layer
                HiddenLayerList[numLayers - 1].Cells.CopyTo(denseFeaturesList[numLayers], srcHiddenAvgOutput.Length);
                OutputLayer.ForwardPass(sparseVector, denseFeaturesList[numLayers]);

                var nextTagId = OutputLayer.GetBestOutputIndex();
                var nextWord  = featurizer.TagSet.GetTagName(nextTagId);

                curState       = featurizer.BuildState(new[] { nextWord });
                curState.Label = nextTagId;

                predicted.Add(nextTagId);

                if (nextWord == "</s>" || predicted.Count >= 100)
                {
                    break;
                }
            }

            return(predicted.ToArray());
        }
Code Example #29
 public double[] Query(Sample sample)
 {
     Compute(sample, false);
     return(OutputLayer.Select(op => op.Value).ToArray());
 }
Code Example #30
        protected virtual int[] TrainSequencePair(ISequence sequence, RunningMode runningMode, bool outputRawScore, out Matrix <float> m)
        {
            SequencePair pSequence   = sequence as SequencePair;
            var          tgtSequence = pSequence.tgtSequence;

            //Reset all layers
            foreach (var layer in HiddenLayerList)
            {
                layer.Reset();
            }

            Sequence srcSequence;

            //Extract features from source sentences
            srcSequence = pSequence.autoEncoder.Config.BuildSequence(pSequence.srcSentence);
            ExtractSourceSentenceFeature(pSequence.autoEncoder, srcSequence, tgtSequence.SparseFeatureSize);

            var numStates      = pSequence.tgtSequence.States.Length;
            var numLayers      = HiddenLayerList.Count;
            var predicted      = new int[numStates];
            var previousLables = new int[numStates];

            m = outputRawScore ? new Matrix <float>(numStates, OutputLayer.LayerSize) : null;

            //Set target sentence labels into short list in output layer
            OutputLayer.LabelShortList.Clear();
            foreach (var state in tgtSequence.States)
            {
                OutputLayer.LabelShortList.Add(state.Label);
            }

            CreateDenseFeatureList();
            for (int i = 0; i < numLayers; i++)
            {
                srcHiddenAvgOutput.CopyTo(denseFeaturesList[i], 0);
            }
            srcHiddenAvgOutput.CopyTo(denseFeaturesList[numLayers], 0);

            var sparseVector = new SparseVector();

            for (var curState = 0; curState < numStates; curState++)
            {
                //Build runtime features
                var state = tgtSequence.States[curState];
                SetRuntimeFeatures(state, curState, numStates, (runningMode == RunningMode.Training) ? previousLables : predicted);

                //Build sparse features for all layers
                sparseVector.Clean();
                sparseVector.SetLength(tgtSequence.SparseFeatureSize + srcSequence.SparseFeatureSize);
                sparseVector.AddKeyValuePairData(state.SparseFeature);
                sparseVector.AddKeyValuePairData(srcSparseFeatures);

                //Compute first layer
                state.DenseFeature.CopyTo().CopyTo(denseFeaturesList[0], srcHiddenAvgOutput.Length);
                HiddenLayerList[0].ForwardPass(sparseVector, denseFeaturesList[0]);

                //Compute middle layers
                for (var i = 1; i < numLayers; i++)
                {
                    //We use previous layer's output as dense feature for current layer
                    HiddenLayerList[i - 1].Cells.CopyTo(denseFeaturesList[i], srcHiddenAvgOutput.Length);
                    HiddenLayerList[i].ForwardPass(sparseVector, denseFeaturesList[i]);
                }

                //Compute output layer
                HiddenLayerList[numLayers - 1].Cells.CopyTo(denseFeaturesList[numLayers], srcHiddenAvgOutput.Length);
                OutputLayer.ForwardPass(sparseVector, denseFeaturesList[numLayers]);

                if (m != null)
                {
                    OutputLayer.Cells.CopyTo(m[curState], 0);
                }

                predicted[curState] = OutputLayer.GetBestOutputIndex();

                if (runningMode == RunningMode.Training)
                {
                    previousLables[curState] = state.Label;

                    // error propagation
                    OutputLayer.ComputeLayerErr(CRFSeqOutput, state, curState);

                    //propagate errors to each layer from output layer to input layer
                    HiddenLayerList[numLayers - 1].ComputeLayerErr(OutputLayer);
                    for (var i = numLayers - 2; i >= 0; i--)
                    {
                        HiddenLayerList[i].ComputeLayerErr(HiddenLayerList[i + 1]);
                    }

                    //Update net weights
                    OutputLayer.BackwardPass();

                    for (var i = 0; i < numLayers; i++)
                    {
                        HiddenLayerList[i].BackwardPass();
                    }
                }
            }

            return(predicted);
        }
コード例 #31
0
        public override int[] ProcessSeq2Seq(SequencePair pSequence, RunningMode runningMode)
        {
            var tgtSequence = pSequence.tgtSequence;
            var isTraining  = runningMode == RunningMode.Training;

            //Reset all layers
            foreach (var layer in HiddenLayerList)
            {
                layer.Reset(isTraining);
            }

            //Extract features from source sentences
            var srcSequence = pSequence.autoEncoder.Config.BuildSequence(pSequence.srcSentence);

            float[] srcHiddenAvgOutput;
            Dictionary <int, float> srcSparseFeatures;

            ExtractSourceSentenceFeature(pSequence.autoEncoder, srcSequence, tgtSequence.SparseFeatureSize,
                                         out srcHiddenAvgOutput, out srcSparseFeatures);

            var numStates = pSequence.tgtSequence.States.Length;
            var numLayers = HiddenLayerList.Count;
            var predicted = new int[numStates];

            //Set target sentence labels into short list in output layer
            OutputLayer.LabelShortList = new List <int>();
            foreach (var state in tgtSequence.States)
            {
                OutputLayer.LabelShortList.Add(state.Label);
            }

            for (var curState = 0; curState < numStates; curState++)
            {
                //Build runtime features
                var state = tgtSequence.States[curState];
                SetRuntimeFeatures(state, curState, numStates, predicted);

                //Build sparse features for all layers
                var sparseVector = new SparseVector();
                sparseVector.SetLength(tgtSequence.SparseFeatureSize + srcSequence.SparseFeatureSize);
                sparseVector.AddKeyValuePairData(state.SparseFeature);
                sparseVector.AddKeyValuePairData(srcSparseFeatures);

                //Compute first layer
                var denseFeatures = RNNHelper.ConcatenateVector(state.DenseFeature, srcHiddenAvgOutput);
                HiddenLayerList[0].ForwardPass(sparseVector, denseFeatures, isTraining);

                //Compute middle layers
                for (var i = 1; i < numLayers; i++)
                {
                    //We use previous layer's output as dense feature for current layer
                    denseFeatures = RNNHelper.ConcatenateVector(HiddenLayerList[i - 1].Cell, srcHiddenAvgOutput);
                    HiddenLayerList[i].ForwardPass(sparseVector, denseFeatures, isTraining);
                }

                //Compute output layer
                denseFeatures = RNNHelper.ConcatenateVector(HiddenLayerList[numLayers - 1].Cell,
                                                            srcHiddenAvgOutput);
                OutputLayer.ForwardPass(sparseVector, denseFeatures, isTraining);

                OutputLayer.Softmax(isTraining);

                predicted[curState] = OutputLayer.GetBestOutputIndex(isTraining);

                if (runningMode != RunningMode.Test)
                {
                    logp += Math.Log10(OutputLayer.Cell[state.Label] + 0.0001);
                }

                if (runningMode == RunningMode.Training)
                {
                    // error propagation
                    OutputLayer.ComputeLayerErr(CRFSeqOutput, state, curState);

                    //propagate errors to each layer from output layer to input layer
                    HiddenLayerList[numLayers - 1].ComputeLayerErr(OutputLayer);
                    for (var i = numLayers - 2; i >= 0; i--)
                    {
                        HiddenLayerList[i].ComputeLayerErr(HiddenLayerList[i + 1]);
                    }

                    //Update net weights
                    Parallel.Invoke(() => { OutputLayer.BackwardPass(numStates, curState); },
                                    () =>
                    {
                        Parallel.For(0, numLayers, parallelOption,
                                     i => { HiddenLayerList[i].BackwardPass(numStates, curState); });
                    });
                }
            }

            return(predicted);
        }