/// <summary>
/// Training-time companion of a <see cref="NeuralLink"/>: allocates GPU buffers for
/// the link's gradients (deriv) and accumulated updates, plus pooling scratch space
/// when the link is a convolution layer.
/// </summary>
/// <param name="neuralLink">The link (weights/bias/topology) this data backs.</param>
public NeuralLinkData(NeuralLink neuralLink)
{
    neuralLinkModel = neuralLink;
    if (neuralLinkModel.Nt == N_Type.Convolution_layer)
    {
        // Per-window convolution activations before max-pooling, and the argmax
        // indices recorded by pooling (needed to route gradients in backprop).
        // NOTE(review): the pooling output is sized by PairInputStream.MAXSEGMENT_BATCH
        // but the pooling index by ParameterSetting.BATCH_SIZE — confirm the two
        // capacities are intentionally different.
        layerPoolingOutput = new CudaPieceFloat(PairInputStream.MAXSEGMENT_BATCH * neuralLinkModel.Neural_Out.Number, false, true);
        layerMaxPooling_Index = new CudaPieceInt(ParameterSetting.BATCH_SIZE * neuralLinkModel.Neural_Out.Number, false, true);
    }
    // One float per weight (in * out * window) and one per bias (out),
    // for both the gradient buffers and the update buffers.
    weightDeriv = new CudaPieceFloat(neuralLinkModel.Neural_In.Number * neuralLinkModel.Neural_Out.Number * neuralLinkModel.N_Winsize, false, true);
    biasDeriv = new CudaPieceFloat(neuralLinkModel.Neural_Out.Number, false, true);
    weightUpdate = new CudaPieceFloat(neuralLinkModel.Neural_In.Number * neuralLinkModel.Neural_Out.Number * neuralLinkModel.N_Winsize, false, true);
    biasUpdate = new CudaPieceFloat(neuralLinkModel.Neural_Out.Number, false, true);
}
/// <summary>
/// Builds a feed-forward DNN: an input layer of <paramref name="featureSize"/> units,
/// one layer per entry of <paramref name="layerDim"/>, and a link between each pair
/// of consecutive layers.
/// </summary>
/// <param name="featureSize">Width of the input layer.</param>
/// <param name="layerDim">Width of each subsequent layer, in order.</param>
/// <param name="activation">Per-link activation function, cast to <see cref="A_Func"/>.</param>
/// <param name="sigma">Per-link weight-initialization sigma.</param>
/// <param name="arch">Per-link network type, cast to <see cref="N_Type"/>.</param>
/// <param name="wind">Per-link window size.</param>
/// <param name="backupOnly">Forwarded to every <see cref="NeuralLink"/>.</param>
public DNN(int featureSize, int[] layerDim, int[] activation, float[] sigma, int[] arch, int[] wind, bool backupOnly)
{
    // Input layer first, then the hidden/output layers in order.
    neurallayers.Add(new NeuralLayer(featureSize));
    foreach (int dim in layerDim)
    {
        neurallayers.Add(new NeuralLayer(dim));
    }

    // Wire each consecutive pair of layers together.
    for (int idx = 0; idx < layerDim.Length; idx++)
    {
        NeuralLink link = new NeuralLink(neurallayers[idx], neurallayers[idx + 1],
            (A_Func)activation[idx], 0, sigma[idx], (N_Type)arch[idx], wind[idx], backupOnly);
        neurallinks.Add(link);
    }
}
/// <summary>
/// Runs a forward pass over a batch: for each link, multiplies the previous layer's
/// output by the link's weights, then adds the bias and applies the link's
/// activation, writing the result into neurallayers[layerIndex + 1].Output.
/// </summary>
/// <param name="data">Mini-batch of (sparse) input samples.</param>
unsafe public void forward_activate(BatchSample_Input data)
{
    int layerIndex = 0;
    foreach (NeuralLinkData neurallinkData in neurallinks)
    {
        NeuralLink neurallink = neurallinkData.NeuralLinkModel;
        // First link consumes the sparse batch directly; deeper links multiply
        // the dense output of the preceding layer.
        if (layerIndex == 0)
        {
            if (neurallink.Nt == N_Type.Fully_Connected)
            {
                MathOperatorManager.GlobalInstance.SEQ_Sparse_Matrix_Multiply_INTEX(data, neurallink.weight, neurallayers[layerIndex + 1].Output, neurallink.Neural_In.Number, neurallink.Neural_Out.Number, neurallink.N_Winsize);
            }
            else if (neurallink.Nt == N_Type.Convolution_layer)
            {
                // Convolution produces per-window activations into LayerPoolingOutput,
                // then max-pooling reduces each sequence to a fixed-size vector;
                // the argmax indices are kept in LayerMaxPooling_Index (for backprop).
                MathOperatorManager.GlobalInstance.Convolution_Sparse_Matrix_Multiply_INTEX(data, neurallink.weight, neurallinkData.LayerPoolingOutput, neurallink.Neural_In.Number, neurallink.Neural_Out.Number, neurallink.N_Winsize);
                MathOperatorManager.GlobalInstance.Max_Pooling(neurallinkData.LayerPoolingOutput, data, neurallayers[layerIndex + 1].Output, neurallinkData.LayerMaxPooling_Index, neurallink.Neural_Out.Number);
            }
        }
        else
        {
            MathOperatorManager.GlobalInstance.Matrix_Multipy(neurallayers[layerIndex].Output, neurallink.weight, neurallayers[layerIndex + 1].Output, data.batchsize, neurallink.Neural_In.Number, neurallink.Neural_Out.Number, 0);
        }
        // Add bias, fused with the activation where the kernel supports it.
        // NOTE(review): only Tanh and Linear are handled — other A_Func values
        // (e.g. rectified) fall through with no bias/activation applied; confirm intended.
        if (neurallink.Af == A_Func.Tanh)
        {
            MathOperatorManager.GlobalInstance.Matrix_Add_Tanh(neurallayers[layerIndex + 1].Output, neurallink.bias, data.batchsize, neurallink.Neural_Out.Number);
        }
        else if (neurallink.Af == A_Func.Linear)
        {
            MathOperatorManager.GlobalInstance.Matrix_Add_Vector(neurallayers[layerIndex + 1].Output, neurallink.bias, data.batchsize, neurallink.Neural_Out.Number);
        }
        layerIndex += 1;
    }
}
/// <summary>
/// Loads a DNN model from a binary file written in either the old or the current
/// on-disk format (selected by ParameterSetting.LoadModelOldFormat), then pushes
/// the host-side arrays to the GPU via <see cref="CopyIntoCuda"/>.
/// </summary>
/// <param name="fileName">Path of the model file to read.</param>
/// <param name="allocateStructureFromEmpty">True will init DNN structure and allocate new space; False will only load data from file.</param>
/// <exception cref="InvalidDataException">A stored weight/bias length does not match the allocated structure.</exception>
public void Model_Load(string fileName, bool allocateStructureFromEmpty)
{
    // using-statements guarantee the stream/reader are closed even if a read throws
    // (the original leaked both on any exception).
    using (FileStream mstream = new FileStream(fileName, FileMode.Open, FileAccess.Read))
    using (BinaryReader mreader = new BinaryReader(mstream))
    {
        // Layer sizes: count followed by one width per layer.
        List<int> layer_info = new List<int>();
        int mlayer_num = mreader.ReadInt32();
        for (int i = 0; i < mlayer_num; i++)
        {
            layer_info.Add(mreader.ReadInt32());
        }
        if (allocateStructureFromEmpty)
        {
            for (int i = 0; i < layer_info.Count; i++)
            {
                NeuralLayer layer = new NeuralLayer(layer_info[i]);
                neurallayers.Add(layer);
            }
        }

        // Link structure: per-link header fields, format-dependent.
        int mlink_num = mreader.ReadInt32();
        for (int i = 0; i < mlink_num; i++)
        {
            int in_num = mreader.ReadInt32();
            int out_num = mreader.ReadInt32();
            float inithidbias = mreader.ReadSingle();
            float initweightsigma = mreader.ReadSingle();
            NeuralLink link = null;
            if (ParameterSetting.LoadModelOldFormat)
            {
                // For back-compatibility only: the old model format does not carry
                // the window-size, activation/network-type, or pooling fields.
                if (allocateStructureFromEmpty)
                {
                    link = new NeuralLink(neurallayers[i], neurallayers[i + 1], A_Func.Tanh, 0, initweightsigma, N_Type.Fully_Connected, 1, false);
                }
            }
            else
            {
                // This is the eventually favorable loading format.
                int mws = mreader.ReadInt32();
                // One Int32 packs two fields: higher 16 bits = activation function
                // (0 = tanh, 1 = linear, 2 = rectified; see Int2A_Func),
                // lower 16 bits = network type.
                int afAndNt = mreader.ReadInt32();
                A_Func aF = Int2A_Func(afAndNt >> 16);
                N_Type mnt = (N_Type)(afAndNt & ((1 << 16) - 1));
                P_Pooling mp = (P_Pooling)mreader.ReadInt32();
                if (allocateStructureFromEmpty)
                {
                    link = new NeuralLink(neurallayers[i], neurallayers[i + 1], aF, 0, initweightsigma, mnt, mws, false);
                }
            }
            if (allocateStructureFromEmpty)
            {
                neurallinks.Add(link);
            }
        }

        // Weight/bias payload. Fail fast on a length mismatch: the original printed
        // the error, blocked forever on Console.ReadLine(), then kept reading and
        // could index past Back_Weight / Back_Bias.
        for (int i = 0; i < mlink_num; i++)
        {
            int weight_len = mreader.ReadInt32();
            if (weight_len != neurallinks[i].Back_Weight.Length)
            {
                throw new InvalidDataException("Loading Model Weight Error! " + weight_len.ToString() + " " + neurallinks[i].Back_Weight.Length.ToString());
            }
            for (int m = 0; m < weight_len; m++)
            {
                neurallinks[i].Back_Weight[m] = mreader.ReadSingle();
            }
            int bias_len = mreader.ReadInt32();
            if (bias_len != neurallinks[i].Back_Bias.Length)
            {
                throw new InvalidDataException("Loading Model Bias Error! " + bias_len.ToString() + " " + neurallinks[i].Back_Bias.Length.ToString());
            }
            for (int m = 0; m < bias_len; m++)
            {
                neurallinks[i].Back_Bias[m] = mreader.ReadSingle();
            }
        }
    }
    CopyIntoCuda();
}
/// <summary>
/// Initializes this link's weight and bias buffers from the reference link's
/// host-side backing arrays (Back_Weight / Back_Bias).
/// </summary>
/// <param name="refLink">Link whose backing arrays provide the initial values.</param>
public void Init(NeuralLink refLink) { weight.Init(refLink.Back_Weight); bias.Init(refLink.Back_Bias); }