Code example #1
File: DSSMPredictor.cs Project: a170811/IWE
        public NeuralLink(NeuralLayer layer_in, NeuralLayer layer_out, A_Func af, float isBias, N_Type nt, int win_size)
        {
            Neural_In  = layer_in;
            Neural_Out = layer_out;
            Nt         = nt;
            N_Winsize  = win_size;

            Af        = af;
            isHidBias = isBias;

            Back_Weight = new float[Neural_In.Number * Neural_Out.Number * N_Winsize];
            Back_Bias   = new float[Neural_Out.Number];
        }
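
For context, a minimal sketch of how this overload might be wired up, mirroring the convolutional case in code example #5; the layer sizes and the helper method name are illustrative assumptions, not part of the original code:

        static NeuralLink BuildConvLink()   // hypothetical helper, for illustration only
        {
            NeuralLayer input  = new NeuralLayer(300);   // illustrative input width
            NeuralLayer hidden = new NeuralLayer(128);   // illustrative hidden width
            // Tanh activation, bias flag 1, convolutional link with window size 3 (as in code example #5).
            // Back_Weight gets input.Number * hidden.Number * 3 entries; Back_Bias gets hidden.Number entries.
            return new NeuralLink(input, hidden, A_Func.Tanh, 1, N_Type.Convolution_layer, 3);
        }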
Code example #2
        public NeuralLink(NeuralLayer layer_in, NeuralLayer layer_out, A_Func af, float hidBias, float weightSigma, N_Type nt, int win_size, bool backupOnly)
        {
            Neural_In  = layer_in;
            Neural_Out = layer_out;
            //Neural_In.Number = Neural_In.Number; // *N_Winsize;
            Nt        = nt;
            N_Winsize = win_size;

            Af              = af;
            initHidBias     = hidBias;
            initWeightSigma = weightSigma;

            weight = new CudaPieceFloat(Neural_In.Number * Neural_Out.Number * N_Winsize, true, !backupOnly);
            bias   = new CudaPieceFloat(Neural_Out.Number, true, !backupOnly);
        }
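
A minimal sketch of how Model_Load (code example #4) calls this overload; the layer sizes, sigma value, and helper name are illustrative assumptions:

        static NeuralLink BuildFullyConnectedLink()   // hypothetical helper, for illustration only
        {
            NeuralLayer l_in  = new NeuralLayer(128);   // illustrative layer sizes
            NeuralLayer l_out = new NeuralLayer(128);
            // Fully connected tanh link, window size 1, GPU-backed parameters (backupOnly == false),
            // mirroring the call site in Model_Load (code example #4).
            return new NeuralLink(l_in, l_out, A_Func.Tanh, 0, 0.1f, N_Type.Fully_Connected, 1, false);
        }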
Code example #3
        public void Fill_Layer_One(string fileName)
        {
            FileStream   mstream = new FileStream(fileName, FileMode.Open, FileAccess.Read);
            BinaryReader mreader = new BinaryReader(mstream);

            List <int> layer_info = new List <int>();
            int        mlayer_num = mreader.ReadInt32();

            for (int i = 0; i < mlayer_num; i++)
            {
                layer_info.Add(mreader.ReadInt32());
            }

            int mlink_num = mreader.ReadInt32();

            for (int i = 0; i < mlink_num; i++)
            {
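                // The per-link header fields (sizes, init bias/sigma, window size, type, pooling) are
                // read only to advance the stream position; this method does not use their values.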
                int       in_num          = mreader.ReadInt32();
                int       out_num         = mreader.ReadInt32();
                float     inithidbias     = mreader.ReadSingle();
                float     initweightsigma = mreader.ReadSingle();
                int       mws             = mreader.ReadInt32();
                N_Type    mnt             = (N_Type)mreader.ReadInt32();
                P_Pooling mp              = (P_Pooling)mreader.ReadInt32();
            }

            for (int i = 0; i < mlink_num; i++)
            {
                int weight_len = mreader.ReadInt32(); // Write(neurallinks[i].Back_Weight.Length);
                if (weight_len != neurallinks[i].Back_Weight.Length)
                {
                    Console.WriteLine("Loading Model Weight Error!  " + weight_len.ToString() + " " + neurallinks[i].Back_Weight.Length.ToString());
                    Console.ReadLine();
                }
                for (int m = 0; m < weight_len; m++)
                {
                    neurallinks[i].Back_Weight[m] = mreader.ReadSingle();
                }
                int bias_len = mreader.ReadInt32();
                if (bias_len != neurallinks[i].Back_Bias.Length)
                {
                    Console.WriteLine("Loading Model Bias Error!  " + bias_len.ToString() + " " + neurallinks[i].Back_Bias.Length.ToString());
                    Console.ReadLine();
                }
                for (int m = 0; m < bias_len; m++)
                {
                    neurallinks[i].Back_Bias[m] = mreader.ReadSingle();
                }
            }
            mreader.Close();
            mstream.Close();

            for (int i = 1; i < neurallinks.Count; i++)
            {
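                // Overwrite the loaded parameters for every link after the first with a pass-through
                // style initialization: zero all biases and set one weight per output unit to 1.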
                for (int m = 0; m < neurallinks[i].Back_Bias.Length; m++)
                {
                    neurallinks[i].Back_Bias[m] = 0;
                }
                int wei_num = neurallinks[i].Back_Weight.Length;
                for (int m = 0; m < neurallinks[i].Neural_Out.Number; m++)
                {
                    neurallinks[i].Back_Weight[(m * neurallinks[i].Neural_Out.Number) % wei_num + m] = 1.0f;
                }
            }

            CopyIntoCuda();
        }
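
For reference, the binary layout this loader (and DSSM_Model_Load_NF in code example #5) expects, reconstructed from the reads above; this is a sketch inferred from the code, not an official format description:

        // int32   layer_count
        // int32   layer_size                     (repeated layer_count times)
        // int32   link_count
        // per link: int32 in_num, int32 out_num, float32 init_hid_bias, float32 init_weight_sigma,
        //           int32 win_size, int32 n_type, int32 pooling
        // per link: int32 weight_len, float32 weights[weight_len], int32 bias_len, float32 biases[bias_len]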
Code example #4
        /// <summary>
        /// Load a DNN model from a binary file.
        /// </summary>
        /// <param name="fileName">Path to the binary model file.</param>
        /// <param name="allocateStructureFromEmpty">If true, initializes the DNN structure and allocates new space; if false, only loads data from the file.</param>
        public void Model_Load(string fileName, bool allocateStructureFromEmpty)
        {
            FileStream   mstream = new FileStream(fileName, FileMode.Open, FileAccess.Read);
            BinaryReader mreader = new BinaryReader(mstream);

            List <int> layer_info = new List <int>();
            int        mlayer_num = mreader.ReadInt32();

            for (int i = 0; i < mlayer_num; i++)
            {
                layer_info.Add(mreader.ReadInt32());
            }
            for (int i = 0; i < layer_info.Count; i++)
            {
                if (allocateStructureFromEmpty)
                {
                    NeuralLayer layer = new NeuralLayer(layer_info[i]);
                    neurallayers.Add(layer);
                }
            }

            int mlink_num = mreader.ReadInt32();

            for (int i = 0; i < mlink_num; i++)
            {
                int   in_num          = mreader.ReadInt32();
                int   out_num         = mreader.ReadInt32();
                float inithidbias     = mreader.ReadSingle();
                float initweightsigma = mreader.ReadSingle();

                NeuralLink link = null;
                if (ParameterSetting.LoadModelOldFormat)
                {
                    if (allocateStructureFromEmpty)
                    {
                        // For backward compatibility only. The old model format does not have those three fields.
                        link = new NeuralLink(neurallayers[i], neurallayers[i + 1], A_Func.Tanh, 0, initweightsigma, N_Type.Fully_Connected, 1, false);
                    }
                }
                else
                {
                    // This is the preferred (current) loading format.
                    int mws = mreader.ReadInt32();
                    //// Decompose an Int32 whose higher 16 bits store the activation function and whose lower 16 bits store the network type.
                    //// In addition, for backward compatibility, neurallinks[i].Af = tanh is stored as 0, linear as 1, and rectified as 2.
                    //// Refer to the Int2A_Func mapping.
                    int       afAndNt = mreader.ReadInt32();
                    A_Func    aF      = Int2A_Func(afAndNt >> 16);
                    N_Type    mnt     = (N_Type)(afAndNt & ((1 << 16) - 1));
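                    // Presumably the matching writer packs a mapped activation code into the high 16 bits and
                    // the network type into the low 16 bits, e.g. afAndNt = (afCode << 16) | (int)mnt (an assumption based on this decode).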
                    P_Pooling mp      = (P_Pooling)mreader.ReadInt32();
                    if (allocateStructureFromEmpty)
                    {
                        link = new NeuralLink(neurallayers[i], neurallayers[i + 1], aF, 0, initweightsigma, mnt, mws, false);
                    }
                }
                if (allocateStructureFromEmpty)
                {
                    neurallinks.Add(link);
                }
            }

            for (int i = 0; i < mlink_num; i++)
            {
                int weight_len = mreader.ReadInt32(); // Write(neurallinks[i].Back_Weight.Length);
                if (weight_len != neurallinks[i].Back_Weight.Length)
                {
                    Console.WriteLine("Loading Model Weight Error!  " + weight_len.ToString() + " " + neurallinks[i].Back_Weight.Length.ToString());
                    Console.ReadLine();
                }
                for (int m = 0; m < weight_len; m++)
                {
                    neurallinks[i].Back_Weight[m] = mreader.ReadSingle();
                }
                int bias_len = mreader.ReadInt32();
                if (bias_len != neurallinks[i].Back_Bias.Length)
                {
                    Console.WriteLine("Loading Model Bias Error!  " + bias_len.ToString() + " " + neurallinks[i].Back_Bias.Length.ToString());
                    Console.ReadLine();
                }
                for (int m = 0; m < bias_len; m++)
                {
                    neurallinks[i].Back_Bias[m] = mreader.ReadSingle();
                }
            }
            mreader.Close();
            mstream.Close();
            CopyIntoCuda();
        }
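
A minimal usage sketch, assuming the hosting class (called Dnn here purely for illustration) exposes a parameterless constructor; the class name, helper name, and file path are assumptions:

        static void LoadExample()   // hypothetical helper, for illustration only
        {
            var dnn = new Dnn();                        // hypothetical container class name
            dnn.Model_Load(@"models\dssm.bin", true);   // allocate layers/links and fill their weights
            // A second call with allocateStructureFromEmpty == false would only refresh the existing buffers.
        }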
Code example #5
File: DSSMPredictor.cs Project: a170811/IWE
        /// <summary>
        /// Load a DSSM model from a binary file.
        /// </summary>
        /// <param name="fileName">Path to the DSSM model file.</param>
        public void DSSM_Model_Load_NF(string fileName)
        {
            FileStream   mstream = new FileStream(fileName, FileMode.Open, FileAccess.Read);
            BinaryReader mreader = new BinaryReader(mstream);

            List <int> layer_info = new List <int>();
            int        mlayer_num = mreader.ReadInt32();

            for (int i = 0; i < mlayer_num; i++)
            {
                layer_info.Add(mreader.ReadInt32());
            }

            FEATURENUM = layer_info[0];

            for (int i = 0; i < layer_info.Count; i++)
            {
                NeuralLayer layer = new NeuralLayer(layer_info[i]);
                neurallayers.Add(layer);
            }

            int mlink_num = mreader.ReadInt32();

            for (int i = 0; i < mlink_num; i++)
            {
                int       in_num          = mreader.ReadInt32();
                int       out_num         = mreader.ReadInt32();
                float     inithidbias     = mreader.ReadSingle();
                float     initweightsigma = mreader.ReadSingle();
                int       mws             = mreader.ReadInt32();
                N_Type    mnt             = (N_Type)mreader.ReadInt32();
                P_Pooling mp              = (P_Pooling)mreader.ReadInt32();

                if (mnt == N_Type.Convolution_layer)
                {
                    NeuralLink link = new NeuralLink(neurallayers[i], neurallayers[i + 1], A_Func.Tanh, 1, mnt, mws);
                    neurallinks.Add(link);
                }
                else if (mnt == N_Type.Fully_Connected)
                {
                    NeuralLink link = new NeuralLink(neurallayers[i], neurallayers[i + 1], A_Func.Tanh);
                    neurallinks.Add(link);
                }
            }

            for (int i = 0; i < mlink_num; i++)
            {
                int weight_len = mreader.ReadInt32(); // Write(neurallinks[i].Back_Weight.Length);
                if (weight_len != neurallinks[i].Back_Weight.Length)
                {
                    Console.WriteLine("Loading Model Weight Error!  " + weight_len.ToString() + " " + neurallinks[i].Back_Weight.Length.ToString());
                    Console.ReadLine();
                }
                for (int m = 0; m < weight_len; m++)
                {
                    neurallinks[i].Back_Weight[m] = mreader.ReadSingle();
                }
                int bias_len = mreader.ReadInt32();
                if (bias_len != neurallinks[i].Back_Bias.Length)
                {
                    Console.WriteLine("Loading Model Bias Error!  " + bias_len.ToString() + " " + neurallinks[i].Back_Bias.Length.ToString());
                    Console.ReadLine();
                }
                for (int m = 0; m < bias_len; m++)
                {
                    neurallinks[i].Back_Bias[m] = mreader.ReadSingle();
                }
            }
            mreader.Close();
            mstream.Close();
        }
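
A minimal usage sketch, assuming the hosting class is the predictor named after this file (DSSMPredictor here is an assumption, as are the helper name and file path):

        static void PredictSetupExample()   // hypothetical helper, for illustration only
        {
            var predictor = new DSSMPredictor();                   // hypothetical class name, taken from the file name
            predictor.DSSM_Model_Load_NF(@"models\dssm_nf.bin");
            // After loading, FEATURENUM holds the size of the first (input) layer.
        }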