Example no. 1
0
        /// <summary>
        /// Runs a forward pass over a single, pre-extracted sparse feature vector and
        /// returns the activations of the network's top layer.
        /// Only the bag-of-words model types (TYPE 0 and 2) are supported by this overload.
        /// </summary>
        /// <param name="feaidx">Indices of the non-zero features; parallel to <paramref name="feaval"/>.</param>
        /// <param name="feaval">Values of the non-zero features.</param>
        /// <returns>One float per neuron in the top output layer.</returns>
        /// <exception cref="ArgumentNullException">Either input array is null.</exception>
        /// <exception cref="ArgumentException">The two arrays differ in length.</exception>
        /// <exception cref="Exception">TYPE selects the CDSSM (word-sequence) path, which this overload does not support.</exception>
        public List <float> Forward(int [] feaidx, float[] feaval)
        {
            if (feaidx == null)
            {
                throw new ArgumentNullException("feaidx");
            }
            if (feaval == null)
            {
                throw new ArgumentNullException("feaval");
            }
            if (feaidx.Length != feaval.Length)
            {
                throw new ArgumentException("feaidx and feaval must have the same length.");
            }

            Layer_Output        output    = new Layer_Output(this);
            List <Sample_Input> inputList = new List <Sample_Input>();

            Sample_Input input = new Sample_Input();

            if (TYPE == 0 || TYPE == 2) //DSSM -- bag of word feature
            {
                Dictionary <int, float> fea = new Dictionary <int, float>();

                for (int i = 0; i < feaidx.Length; i++)
                {
                    // Accumulate rather than Add() so a repeated feature index sums
                    // its values instead of crashing with a duplicate-key exception.
                    if (fea.ContainsKey(feaidx[i]))
                    {
                        fea[feaidx[i]] += feaval[i];
                    }
                    else
                    {
                        fea.Add(feaidx[i], feaval[i]);
                    }
                }

                input.Load_BOW(fea);
            }
            else if (TYPE == 1 || TYPE == 3) //CDSSM -- seq of word feature
            {
                throw new Exception("cdssm for this kind of feature NOT SUPPORTED yet!");
            }
            // NOTE(review): any other TYPE value silently forwards an empty input — confirm intended.
            inputList.Add(input);

            forward_activate(inputList, output);

            // Copy the top-layer activations into a plain list for the caller.
            List <float> result = new List <float>();

            for (int i = 0; i < output.layerOutputs[output.Layer_TOP].Length; i++)
            {
                result.Add(output.layerOutputs[output.Layer_TOP][i]);
            }
            return(result);
        }
Example no. 2
0
        /// <summary>
        /// Runs a forward pass over raw text: optionally splits it into sentences (one
        /// sample per sentence when a pooling layer is configured), extracts the selected
        /// feature families per sentence, and returns the top-layer activations.
        /// </summary>
        /// <param name="text">Raw input text.</param>
        /// <param name="dic">Word-embedding lookup; currently unused — the embedding path below is commented out.</param>
        /// <param name="featureList">Flags selecting which feature families (letter-trigram, root, inflection) to extract.</param>
        /// <returns>One float per neuron in the top output layer.</returns>
        public List <float> Forward(string text, Dictionary <string, Dictionary <int, float> > dic, FeatureList featureList)
        {
            Layer_Output        output    = new Layer_Output(this);
            List <Sample_Input> inputList = new List <Sample_Input>();

            // With a pooling layer (PoolIdx >= 1) each sentence is a separate sample;
            // otherwise the whole text is fed as a single sample.
            string[] sentenceList = PoolIdx >= 1 ?
                                    text.Split(new char[] { '.', '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries)
                : new string[] { text };

            // Cap the sentence count: keep the first MaxPoolSentenceNumber-1 sentences
            // and collapse all remaining text into the final slot.
            if (sentenceList.Length > MaxPoolSentenceNumber)
            {
                string[] sentenceListTmp = new string[MaxPoolSentenceNumber];
                for (int i = 0; i < MaxPoolSentenceNumber - 1; ++i)
                {
                    sentenceListTmp[i] = sentenceList[i];
                }
                sentenceListTmp[MaxPoolSentenceNumber - 1] = string.Join(" ", sentenceList.Skip(MaxPoolSentenceNumber - 1));
                sentenceList = sentenceListTmp;
            }

            foreach (string sentence in sentenceList)
            {
                Sample_Input input = new Sample_Input();

                if (TYPE == 0 || TYPE == 2) //DSSM -- bag of word feature
                {
                    Dictionary <int, float> fea = new Dictionary <int, float>();

                    // 'pos' offsets each feature family into its own id range so the
                    // families cannot collide inside 'fea'.
                    int pos = 0;
                    if (featureList.l3g == true)
                    {
                        // Merge in place instead of fea.Concat(tmp).ToDictionary(...),
                        // which rebuilt the whole dictionary on every family merge.
                        AddFeatures(fea, LFE.Feature_Extract_BOW(sentence, pos));
                        pos += LFE.vocab_dict.Count;
                    }
                    if (featureList.root == true)
                    {
                        int count      = 0;
                        var featStrFeq = TextUtils.String2FeatStrSeq(sentence, 3, 20, FeatureType.root);  // list of root
                        List <Dictionary <int, double> > tmpList = TextUtils.StrFreq2IdFreq(featStrFeq, FeatureType.root, pos, ref count);
                        foreach (var kv in TextUtils.MergeList(tmpList))
                        {
                            fea.Add(kv.Key, (float)(kv.Value));
                        }
                        pos += count;
                    }
                    if (featureList.infl == true)
                    {
                        int count      = 0;
                        var featStrFeq = TextUtils.String2FeatStrSeq(sentence, 3, 20, FeatureType.infl);  // list of inflections
                        List <Dictionary <int, double> > tmpList = TextUtils.StrFreq2IdFreq(featStrFeq, FeatureType.infl, pos, ref count);
                        foreach (var kv in TextUtils.MergeList(tmpList))
                        {
                            fea.Add(kv.Key, (float)(kv.Value));
                        }
                        pos += count;
                    }

                    input.Load_BOW(fea);
                }
                else if (TYPE == 1 || TYPE == 3) //CDSSM -- seq of word feature
                {
                    // NOTE(review): the word-embedding extractor that used 'dic' is
                    // disabled; only the plain sequence-of-words extractor runs.
                    //if (featureType == FeatureType.we)
                    //{
                    //    feas = LFE.Feature_Extractor_SOW(sentence, dic);
                    //}
                    List <Dictionary <int, float> > feas = LFE.Feature_Extractor_SOW(sentence);

                    input.Load_SOW(feas);
                }

                inputList.Add(input);
            }

            forward_activate(inputList, output);

            // Copy the top-layer activations into a plain list for the caller.
            List <float> result = new List <float>();

            for (int i = 0; i < output.layerOutputs[output.Layer_TOP].Length; i++)
            {
                result.Add(output.layerOutputs[output.Layer_TOP][i]);
            }
            return(result);
        }

        /// <summary>
        /// Adds every entry of <paramref name="source"/> into <paramref name="target"/> in place.
        /// Throws <see cref="ArgumentException"/> on a duplicate key, matching the behavior of
        /// the Concat().ToDictionary() merge it replaces, but without rebuilding the dictionary.
        /// </summary>
        private static void AddFeatures(Dictionary <int, float> target, Dictionary <int, float> source)
        {
            foreach (KeyValuePair <int, float> kv in source)
            {
                target.Add(kv.Key, kv.Value);
            }
        }
Example no. 3
0
        /// <summary>
        /// Propagates a batch of samples through every layer of the network, writing
        /// intermediate and final activations into <paramref name="output"/>.
        /// Layers below PoolIdx are evaluated once per sample (writing side by side into
        /// the secondary pooling buffers, offset by sample index); at layer PoolIdx-1 the
        /// per-sample outputs are max-pooled into a single representation, and layers at
        /// or above PoolIdx run on that pooled representation only (dataList[0] supplies
        /// the batch/sample indexing there).
        /// NOTE(review): assumes all samples share the same batchsize — taken from the
        /// first sample only; confirm with callers.
        /// </summary>
        /// <param name="dataList">One Sample_Input per sample (e.g. per sentence); empty list is a no-op.</param>
        /// <param name="output">Pre-allocated per-layer output buffers to fill.</param>
        public void forward_activate(List <Sample_Input> dataList, Layer_Output output)
        {
            if (dataList.Count == 0)
            {
                return;
            }
            int layerIndex = 0;
            int batchsize  = dataList.First().batchsize;

            foreach (NeuralLink neurallink in neurallinks)
            {
                if (layerIndex < PoolIdx)
                {
                    // Pre-pooling layers: run each sample independently; results are
                    // packed into layerPoolingSecondary at offset i * Neural_Out.Number.
                    for (int i = 0; i < dataList.Count; ++i)
                    {
                        Sample_Input data = dataList[i];

                        ///first layer: consumes the sparse input features directly.
                        if (layerIndex == 0)
                        {
                            if (neurallink.Nt == N_Type.Fully_Connected)
                            {
                                //????data.Norm_BOW(2);
                                BasicMathlib.Sparse_Matrix_Multiply_INTEX(data.Sample_Idx, data.Fea_Idx, data.Fea_Value, data.elementsize,
                                                                          neurallink.Back_Weight, output.layerPoolingSecondary[layerIndex], data.batchsize,
                                                                          neurallink.Neural_Out.Number, neurallink.Neural_In.Number, i * neurallink.Neural_Out.Number);
                            }
                            else if (neurallink.Nt == N_Type.Convolution_layer)
                            {
                                // Convolution over word-window segments, then max-pool
                                // over positions; pooling indices are kept for backprop.
                                BasicMathlib.Convolution_Sparse_Matrix_Multiply_INTEX(data.Sample_Idx, data.batchsize, data.Seg_Idx, data.Seg_Margin, data.segsize, data.Fea_Idx, data.Fea_Value, data.elementsize,
                                                                                      neurallink.Back_Weight, output.layerPooling[layerIndex], neurallink.Neural_In.Number, neurallink.Neural_Out.Number, neurallink.N_Winsize);
                                BasicMathlib.Max_Pooling(output.layerPooling[layerIndex], data.Sample_Idx, data.batchsize, output.layerPoolingSecondary[layerIndex], output.layerMaxPooling_Index[layerIndex], neurallink.Neural_Out.Number, i * neurallink.Neural_Out.Number);
                            }
                        }
                        else
                        {
                            // Hidden pre-pooling layer: dense multiply from the previous
                            // layer's slice for sample i into this layer's slice.
                            BasicMathlib.Matrix_Multiply(output.layerPoolingSecondary[layerIndex - 1], neurallink.Back_Weight, output.layerPoolingSecondary[layerIndex], batchsize, neurallink.Neural_Out.Number, neurallink.Neural_In.Number, i * neurallink.Neural_In.Number, i * neurallink.Neural_Out.Number);
                        }
                    }
                    // Bias + activation applied once across all samples' packed outputs.
                    if (neurallink.Af == A_Func.Tanh)
                    {
                        BasicMathlib.Matrix_Add_Tanh(output.layerPoolingSecondary[layerIndex], neurallink.Back_Bias, batchsize, neurallink.Neural_Out.Number * dataList.Count);
                    }
                    // Last pre-pooling layer: max-pool across samples into layerOutputs.
                    if (layerIndex == PoolIdx - 1)
                    {
                        BasicMathlib.Max_PoolingDense(output.layerPoolingSecondary[layerIndex], batchsize, output.layerOutputs[layerIndex], output.layerMaxPooling_IndexSecondary[layerIndex], dataList.Count, neurallink.Neural_Out.Number);
                    }
                }
                else
                {
                    // Post-pooling (or no pooling) layers: a single pass using the
                    // first sample's indexing; outputs go straight to layerOutputs.
                    ///first layer.
                    if (layerIndex == 0)
                    {
                        if (neurallink.Nt == N_Type.Fully_Connected)
                        {
                            BasicMathlib.Sparse_Matrix_Multiply_INTEX(dataList[0].Sample_Idx, dataList[0].Fea_Idx, dataList[0].Fea_Value, dataList[0].elementsize,
                                                                      neurallink.Back_Weight, output.layerOutputs[layerIndex], dataList[0].batchsize,
                                                                      neurallink.Neural_Out.Number, neurallink.Neural_In.Number, 0);
                        }
                        else if (neurallink.Nt == N_Type.Convolution_layer)
                        {
                            BasicMathlib.Convolution_Sparse_Matrix_Multiply_INTEX(dataList[0].Sample_Idx, dataList[0].batchsize, dataList[0].Seg_Idx, dataList[0].Seg_Margin, dataList[0].segsize, dataList[0].Fea_Idx, dataList[0].Fea_Value, dataList[0].elementsize,
                                                                                  neurallink.Back_Weight, output.layerPooling[layerIndex], neurallink.Neural_In.Number, neurallink.Neural_Out.Number, neurallink.N_Winsize);

                            BasicMathlib.Max_Pooling(output.layerPooling[layerIndex], dataList[0].Sample_Idx, dataList[0].batchsize, output.layerOutputs[layerIndex], output.layerMaxPooling_Index[layerIndex], neurallink.Neural_Out.Number, 0);
                        }
                    }
                    else
                    {
                        // Dense hidden layer on the pooled representation.
                        BasicMathlib.Matrix_Multiply(output.layerOutputs[layerIndex - 1], neurallink.Back_Weight, output.layerOutputs[layerIndex], dataList[0].batchsize, neurallink.Neural_Out.Number, neurallink.Neural_In.Number, 0, 0);
                    }
                    if (neurallink.Af == A_Func.Tanh)
                    {
                        BasicMathlib.Matrix_Add_Tanh(output.layerOutputs[layerIndex], neurallink.Back_Bias, dataList[0].batchsize, neurallink.Neural_Out.Number);
                    }
                }
                layerIndex += 1;
            }
        }