/// <summary>
/// Builds an array of convolution cores, one per sample, all sharing the same
/// layer parameter set.
/// </summary>
/// <param name="paramInfo">Layer parameters shared by every core.</param>
/// <param name="sampleCount">Number of samples; one <c>LConvolution</c> is created per sample.</param>
public LConvArry(LayerParamsInfo paramInfo, int sampleCount)
{
    ParamsInfo = paramInfo;
    CoreArry = new LConvolution[sampleCount];

    int slot = 0;
    while (slot < sampleCount)
    {
        CoreArry[slot] = new LConvolution(paramInfo);
        slot++;
    }
}
/// <summary>
/// Initializes the parameter set (weights and biases) for each hidden layer.
/// </summary>
/// <param name="layer">Number of hidden layers to prepare.</param>
/// <param name="count">Per-layer kernel count; must supply at least <paramref name="layer"/> entries.</param>
/// <param name="depth">Per-layer kernel depth; must supply at least <paramref name="layer"/> entries.</param>
/// <exception cref="ArgumentNullException"><paramref name="count"/> or <paramref name="depth"/> is null.</exception>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="layer"/> is negative.</exception>
/// <exception cref="ArgumentException">An input array is shorter than <paramref name="layer"/>.</exception>
public void InitWeight(int layer, int[] count, int[] depth)
{
    // Validate up front: an IndexOutOfRangeException part-way through the loop
    // would leave LayerInfo half-populated with no indication why.
    if (count == null) throw new ArgumentNullException(nameof(count));
    if (depth == null) throw new ArgumentNullException(nameof(depth));
    if (layer < 0) throw new ArgumentOutOfRangeException(nameof(layer));
    if (count.Length < layer || depth.Length < layer)
        throw new ArgumentException("count and depth must supply one entry per layer.");

    LayerInfo = new LayerParamsInfo[layer];
    for (int l = 0; l < layer; l++)
    {
        LayerInfo[l] = new LayerParamsInfo();
        LayerInfo[l].CurrentLayer = l;
        LayerInfo[l].InitWeightBias(count[l], depth[l]);
    }
}
/// <summary>
/// Creates a convolution core from the given layer parameters. Kernel geometry
/// (row/column counts) is taken from the first weight matrix.
/// </summary>
/// <param name="parInfo">Layer parameter set whose weight info drives this core.</param>
public LConvolution(LayerParamsInfo parInfo)
{
    ParamsInfo = parInfo;
    WehtInfo = parInfo.WehtInfo;

    kernelCount = WehtInfo.Length;
    kernelDepth = WehtInfo[0].Length;

    // All kernels share the geometry of the first weight matrix.
    var firstKernel = WehtInfo[0][0].W;
    int rows = firstKernel.GetLength(0);
    int cols = firstKernel.GetLength(1);

    PadRow = rows;
    PadColumn = cols;
    kernelRow = rows;
    kernelColumn = cols;

    Padding = 0;
    Stride = 1;

    // The kernel depth is identical to the channel count of a single sample.
    _col = new LMatrix[kernelDepth];
}
/// <summary>
/// Runs one hidden layer forward: convolution, then activation, then max pooling.
/// The per-layer sub-objects are created lazily on first use and cached.
/// </summary>
/// <param name="input">Input batch; dimension 0 is the sample index (remaining
/// dimensions are presumably channel/row/column — confirm against caller).</param>
/// <param name="layerIndex">Index of the hidden layer whose parameters to use.</param>
/// <returns>The pooled activation values for this layer.</returns>
public double[,,,] forward(double[,,,] input, int layerIndex)
{
    LayerParamsInfo curLPInfo = LayerInfo[layerIndex];
    int sampleCount = input.GetLength(0);

    // Convolution layer (lazily constructed: one core per sample in the batch).
    if (ConvLayer[layerIndex] == null)
    {
        ConvLayer[layerIndex] = new LConvArry(curLPInfo, sampleCount);
    }
    double[,,,] oneConvValues = ConvLayer[layerIndex].forward(input);

    // Activation layer.
    if (ActiveLayer[layerIndex] == null)
    {
        ActiveLayer[layerIndex] = new LActiveArry();
    }
    double[,,,] oneActivValues = ActiveLayer[layerIndex].forward(oneConvValues);

    // Pooling layer.
    if (PoolLayer[layerIndex] == null)
    {
        PoolLayer[layerIndex] = new LMaxPool();
    }
    return PoolLayer[layerIndex].forward(oneActivValues);
}
/// <summary>
/// Runs the training loop: loads one epoch of image data, initializes the three
/// hidden convolution layers, then iterates forward/backward passes while
/// logging loss/accuracy and serializing weights to disk every iteration.
/// NOTE(review): GetNextEpoch is called once, outside the loop — all 50000
/// iterations train on the same batch; confirm this is intentional.
/// </summary>
/// <param name="epoch">Epoch count passed through to the data source.</param>
public void StartTrain(int epoch)
{
    Sources = new CnnSource(epoch);
    // Hard-coded training/validation image directories.
    Sources.CollectImgInfo(@"D:\AI\test_img", @"D:\AI\test_img2");
    Tuple<double[,,,], double[,]> Train = Sources.GetNextEpoch();
    MapInput = Train.Item1;
    // "lable" [sic] holds the one-hot labels for the loaded batch.
    LMatrix lable = Train.Item2;
    #region 初始化隐藏层基本属性和隐藏层权重值
    // Three hidden layers; Count = kernel count, Depth = kernel depth per layer.
    var layers = new LayerAttribute[]
    {
        new LayerAttribute { Count = 26, Depth = 3 },
        new LayerAttribute { Count = 52, Depth = 26 },
        new LayerAttribute { Count = 52, Depth = 38 }
    };
    LayerInfo = new LayerParamsInfo[layers.Length];
    for (int index = 0; index < layers.Length; index++)
    {
        LayerInfo[index] = new LayerParamsInfo();
        LayerInfo[index].CurrentLayer = index;
        LayerInfo[index].InitWeightBias(layers[index].Count, layers[index].Depth);
    }
    #endregion
    #region 准备训练对象
    // Adam optimizer with learning rate 0.1.
    LOptimizer optimizer = new LAdam(0.1);
    ThreeLayerNet net = null;
    bool isRead = false;
    // Output directory for per-iteration weight/state snapshots.
    string path = @"d:\wen\good\";
    for (int i = 0; i < 50000; i++)
    {
        Debug.Write("开始时间" + DateTime.Now.ToString("HH:mm:ss\n"));
        // Convolutional feature extraction (forward pass through the hidden layers).
        var inputFeature = neuralnetworkforward();
        if (net == null)
        {
            // Fully-connected head is sized from the first feature vector:
            // input width x 200 hidden units x 2 output classes.
            net = new ThreeLayerNet(inputFeature.GetLength(1), 200, 2, isRead: isRead);
        }
        net.Input = inputFeature;
        double loss = net.forward(lable);
        Debug.Write(string.Format("{0}:{1}当前误差:{2}\n", DateTime.Now.ToString("HH:mm:ss"), i, loss));
        double accuracy = net.Accuracy(lable);
        Debug.Write(string.Format("{0}:{1}当前识别精度:{2}\n", DateTime.Now.ToString("HH:mm:ss"), i, accuracy));
        // Backward pass through the fully-connected net, then collect its
        // gradients and parameters for the optimizer update below.
        LMatrix dout = net.backward(lable);
        List<LMatrix> grads = net.Gradient();
        List<LMatrix> param = net.GetParams();
        // Snapshot the current weights to disk (one subdirectory per iteration,
        // accuracy embedded in the file name).
        string strWB = JsonConvert.SerializeObject(param.Select(x => x.Matrix));
        if (!Directory.Exists(path + i.ToString()))
        {
            Directory.CreateDirectory(path + i.ToString());
        }
        File.WriteAllText(path + i.ToString() + @"\wb_" + accuracy.ToString() + "_.txt", strWB);
        // Reshape the head's gradient back to the last conv layer's output shape
        // (38 channels x CurRow x CurColumn) and propagate through the conv stack.
        var _backdout = dout.ResetSize(38, CurRow, CurColumn);
        neuralnetworkbackward(_backdout);
        #region 卷积梯度更新
        int infolen = LayerInfo.Length;
        for (int idx = 0; idx < infolen; idx++)
        {
            // Appends each conv layer's parameters/gradients to the shared lists
            // so the optimizer update below covers them too — TODO confirm.
            LayerInfo[idx].UpdateGradient(param, grads);
        }
        #endregion
        optimizer.Update(param, grads);
        // NOTE(review): left disabled — would make ThreeLayerNet reload saved
        // weights on reconstruction; confirm before enabling.
        //isRead = true;
        // Record the convolution-layer state for every iteration.
        File.WriteAllText(path + i.ToString() + @"\LayerInfo.txt", JsonConvert.SerializeObject(LayerInfo));
        File.WriteAllText(path + i.ToString() + @"\ConvInfo.txt", JsonConvert.SerializeObject(ConvLayer));
    }
    #endregion
}