Example #1
0
        /// <summary>
        /// Creates a fully-connected link between an input layer and an output layer,
        /// allocating flat weight and bias buffers sized from the layer neuron counts.
        /// </summary>
        /// <param name="layer_in">Source layer of the link.</param>
        /// <param name="layer_out">Destination layer of the link.</param>
        /// <param name="af">Activation function applied by this link.</param>
        public NeuralLink(NeuralLayer layer_in, NeuralLayer layer_out, A_Func af)
        {
            Neural_In  = layer_in;
            Neural_Out = layer_out;
            Af         = af;

            // One weight per (input neuron, output neuron) pair, one bias per output neuron.
            int weightCount = Neural_In.Number * Neural_Out.Number;
            Back_Weight = new float[weightCount];
            Back_Bias   = new float[Neural_Out.Number];
        }
Example #2
0
        /// <summary>
        /// Creates a link between two layers with an explicit layer type and window size
        /// (e.g. a convolution link), allocating weight and bias buffers accordingly.
        /// </summary>
        /// <param name="layer_in">Source layer of the link.</param>
        /// <param name="layer_out">Destination layer of the link.</param>
        /// <param name="af">Activation function applied by this link.</param>
        /// <param name="isBias">Hidden-bias flag/value stored in <c>isHidBias</c>.</param>
        /// <param name="nt">Link type (convolution, fully connected, ...).</param>
        /// <param name="win_size">Convolution window size; scales the weight buffer.</param>
        public NeuralLink(NeuralLayer layer_in, NeuralLayer layer_out, A_Func af, float isBias, N_Type nt, int win_size)
        {
            Neural_In  = layer_in;
            Neural_Out = layer_out;
            Af         = af;
            isHidBias  = isBias;
            Nt         = nt;
            N_Winsize  = win_size;

            // Weight buffer covers every (input, output) pair for each window position.
            int weightCount = Neural_In.Number * Neural_Out.Number * N_Winsize;
            Back_Weight = new float[weightCount];
            Back_Bias   = new float[Neural_Out.Number];
        }
Example #3
0
        /// <summary>
        /// Load DSSM
        /// </summary>
        /// <param name="fileName"></param>
        /// <summary>
        /// Loads a DSSM model (layer sizes, link topology, then weights and biases)
        /// from a binary model file.
        /// </summary>
        /// <param name="fileName">Path to the binary model file.</param>
        /// <exception cref="InvalidDataException">
        /// Thrown when a stored weight or bias length does not match the link
        /// constructed from the file's layer topology.
        /// </exception>
        public void DSSM_Model_Load_NF(string fileName)
        {
            // using blocks guarantee the stream and reader are closed even if parsing
            // throws; the original only called Close() on the success path, leaking
            // both handles on any exception.
            using (FileStream mstream = new FileStream(fileName, FileMode.Open, FileAccess.Read))
            using (BinaryReader mreader = new BinaryReader(mstream))
            {
                // Header: number of layers, then each layer's neuron count.
                List<int> layer_info = new List<int>();
                int       mlayer_num = mreader.ReadInt32();

                for (int i = 0; i < mlayer_num; i++)
                {
                    layer_info.Add(mreader.ReadInt32());
                }

                // First layer size doubles as the feature dimension.
                FEATURENUM = layer_info[0];

                for (int i = 0; i < layer_info.Count; i++)
                {
                    NeuralLayer layer = new NeuralLayer(layer_info[i]);
                    neurallayers.Add(layer);
                }

                // Link section: per-link header records, then (below) the raw parameters.
                int mlink_num = mreader.ReadInt32();

                for (int i = 0; i < mlink_num; i++)
                {
                    // Every header field must be read to keep the stream position in
                    // sync with the file format, even when the value is unused here.
                    int       in_num          = mreader.ReadInt32();
                    int       out_num         = mreader.ReadInt32();
                    float     inithidbias     = mreader.ReadSingle();
                    float     initweightsigma = mreader.ReadSingle();
                    int       mws             = mreader.ReadInt32();
                    N_Type    mnt             = (N_Type)mreader.ReadInt32();
                    P_Pooling mp              = (P_Pooling)mreader.ReadInt32();

                    if (mnt == N_Type.Convolution_layer)
                    {
                        NeuralLink link = new NeuralLink(neurallayers[i], neurallayers[i + 1], A_Func.Tanh, 1, mnt, mws);
                        neurallinks.Add(link);
                    }
                    else if (mnt == N_Type.Fully_Connected)
                    {
                        NeuralLink link = new NeuralLink(neurallayers[i], neurallayers[i + 1], A_Func.Tanh);
                        neurallinks.Add(link);
                    }
                    // NOTE(review): any other N_Type adds no link, so neurallinks would
                    // fall out of step with mlink_num and the parameter loop below would
                    // index a missing entry — confirm whether model files can contain
                    // additional link types.
                }

                // Parameter section: for each link, a length-prefixed weight array
                // followed by a length-prefixed bias array.
                for (int i = 0; i < mlink_num; i++)
                {
                    int weight_len = mreader.ReadInt32();
                    if (weight_len != neurallinks[i].Back_Weight.Length)
                    {
                        // Fail fast: the original printed an error and blocked on
                        // Console.ReadLine(), then kept loading a corrupt model.
                        throw new InvalidDataException(
                            "Loading Model Weight Error!  " + weight_len + " " + neurallinks[i].Back_Weight.Length);
                    }
                    for (int m = 0; m < weight_len; m++)
                    {
                        neurallinks[i].Back_Weight[m] = mreader.ReadSingle();
                    }

                    int bias_len = mreader.ReadInt32();
                    if (bias_len != neurallinks[i].Back_Bias.Length)
                    {
                        throw new InvalidDataException(
                            "Loading Model Bias Error!  " + bias_len + " " + neurallinks[i].Back_Bias.Length);
                    }
                    for (int m = 0; m < bias_len; m++)
                    {
                        neurallinks[i].Back_Bias[m] = mreader.ReadSingle();
                    }
                }
            }
        }