public static IEnumerable<INodeLink> MakeLink(this IEnumerable<INode> nodes, int width, int filterSize, int offsetX, int offsetY, Func<int, int, IWeight> getWeight)
{
    var inputNodes = nodes.ToArray();
    var height = inputNodes.Length / width;

    var bias = new NodeLink
    {
        InputNode = new ValueNode() { Value = 1 },
        Weight = Weight.Make(DLF.GetRandomWeight())
    };

    var links = (
        from y in Enumerable.Range(0, filterSize)
        from x in Enumerable.Range(0, filterSize)
        where (offsetX + x) < width
        where (offsetY + y) < height
        let nodeIndex = ((offsetY + y) * width) + offsetX + x
        let inputNode = inputNodes[nodeIndex]
        select new NodeLink
        {
            InputNode = inputNode,
            Weight = getWeight(x, y)
        })
        .ToArray();

    return new[] { bias }.Concat(links).ToArray();
}
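The window-to-index mapping above is easiest to see with concrete numbers. The following standalone snippet is not part of the library; the width, filter size and offsets are arbitrary example values, and it only reproduces the ((offsetY + y) * width) + offsetX + x calculation for a 2x2 window over a 4-wide input.

using System;
using System.Linq;

// Row-major index math used by MakeLink, shown with arbitrary example values:
// a 4-wide input and a 2x2 filter window anchored at (offsetX, offsetY) = (1, 2).
int width = 4, filterSize = 2, offsetX = 1, offsetY = 2;

var indices =
    from y in Enumerable.Range(0, filterSize)
    from x in Enumerable.Range(0, filterSize)
    select ((offsetY + y) * width) + offsetX + x;

Console.WriteLine(string.Join(", ", indices)); // 9, 10, 13, 14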
public ConvolutionLayer(ILayer before, ValueTuple<int, int, int> filter, Func<IEnumerable<double>, IEnumerable<double>> activation, Func<double, bool> ignoreUpdate = null)
{
    (int filterSize, int stride, int filterCount) = filter;

    this.CalcFunction = DLF.CalcFunction;
    this.ActivationFunction = activation;
    this.UpdateWeightFunction = DLF.UpdateWeight(ignoreUpdate);

    int width, height, chSize;
    if (before is I2DLayer dLayer)
    {
        width = dLayer.OutputWidth;
        height = dLayer.OutputHeight;
        chSize = dLayer.OutputCh;
    }
    else
    {
        width = before.Nodes.Count();
        height = 1;
        chSize = 1;
    }

    // Output size per spatial dimension.
    var xSize = (int)Math.Ceiling((width - filterSize) / (double)stride);
    var ySize = (int)Math.Ceiling((height - filterSize) / (double)stride);

    this.OutputWidth = xSize;
    this.OutputHeight = ySize;
    this.OutputCh = filterCount;

    this.Nodes = (
        from filterIndex in Enumerable.Range(0, filterCount)
        // One shared filterSize x filterSize weight set per filter.
        let weights = (
            from fx in Enumerable.Range(0, filterSize)
            from fy in Enumerable.Range(0, filterSize)
            select Weight.Make(DLF.GetRandomWeight(), xSize * ySize * chSize))
            .ToArray()
        from y in Enumerable.Range(0, height - filterSize).Where(i => i % stride == 0)
        from x in Enumerable.Range(0, width - filterSize).Where(i => i % stride == 0)
        // Link the same filter window across every input channel.
        let links =
            from channelOffset in Enumerable.Range(0, chSize)
            let lx = before.Nodes.MakeLink(width, filterSize, x, y + (channelOffset * height), (wx, wy) => weights[(wy * filterSize) + wx]).ToArray()
            select lx
        let node = new Node(ActivationFunction, links.SelectMany(l => l).ToArray())
        select node).ToArray();
}
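As a usage sketch (an assumption, not code from the library itself): given an existing layer inputLayer and a plain ReLU applied to the whole output vector, a convolution layer with a 5x5 filter, stride 2 and 8 filters could be wired up like this.

// Hypothetical usage of the constructor above. inputLayer is assumed to be an
// ILayer (ideally an I2DLayer) built earlier in the article; the activation is a simple ReLU.
Func<IEnumerable<double>, IEnumerable<double>> relu = vs => vs.Select(v => Math.Max(0.0, v));
var conv = new ConvolutionLayer(inputLayer, (5, 2, 8), relu);
// conv.OutputWidth, conv.OutputHeight and conv.OutputCh now describe the feature map.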
private IEnumerable<Tuple<IEnumerable<double>, IEnumerable<double>>> Learn(int i, IEnumerable<ILearningData> learningData)
{
    var shuffled = DLF.Shuffle(learningData).ToArray();
    var dataCount = 0;
    var dataIndex = 0;
    var allNodes = this.Layers.SelectMany(x => x.Nodes).Where(x => !(x is ValueNode)).ToArray();

    var watch = new Stopwatch();
    watch.Start();

    // Iterate over each sample of the data set
    foreach (var data in shuffled)
    {
        // Run the network on this sample (forward pass)
        var result = Test(data.Data).ToArray();

        // Update the input weights of each node
        UpdateWeight(data);

        dataCount = (dataCount + 1) % this.miniBatch;
        var isBatchUpdate = dataCount == 0;
        if (isBatchUpdate)
        {
            foreach (var node in allNodes)
            {
                node.Apply(this.learningRate);
            }
        }
        else
        {
            foreach (var node in allNodes)
            {
                node.Reset();
            }
        }

        dataIndex++;
        if (isBatchUpdate)
        {
            // Average seconds per sample over the last mini-batch.
            Console.WriteLine(watch.ElapsedMilliseconds / miniBatch / 1000.0);
            watch.Restart();
        }

        var ret = Tuple.Create(data.Expected, result as IEnumerable<double>);
        yield return ret;
    }
}
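The mini-batch cadence is driven entirely by the dataCount counter: Apply(learningRate) runs only on every miniBatch-th sample, and every other sample goes through the else branch. A standalone illustration of just that counter logic, with an assumed miniBatch of 4:

using System;

// Stand-alone illustration of the batch-update condition in Learn (miniBatch = 4 is arbitrary).
int miniBatch = 4, dataCount = 0;
for (int sample = 1; sample <= 10; sample++)
{
    dataCount = (dataCount + 1) % miniBatch;
    bool isBatchUpdate = dataCount == 0;
    Console.WriteLine($"sample {sample}: batch update = {isBatchUpdate}");
}
// Prints true for samples 4 and 8, false otherwise.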
public PoolingLayer(ILayer before, ValueTuple<int, int> filter)
{
    (int poolingSize, int stride) = filter;

    // Max pooling: a node's value is the maximum of its linked inputs.
    this.CalcFunction = n => n.Links.Select(l => l.InputNode.GetValue()).Max();
    this.ActivationFunction = v => v;
    // Only the input that produced the maximum receives the propagated delta.
    this.UpdateWeightFunction = DLF.UpdateWeight(null, (n, l, d) =>
    {
        if (n.GetValue() == l.InputNode.GetValue())
        {
            l.InputNode.Delta += d * l.Weight.Value;
        }
    });

    int width, height, chSize;
    if (before is I2DLayer dLayer)
    {
        width = dLayer.OutputWidth;
        height = dLayer.OutputHeight;
        chSize = dLayer.OutputCh;
    }
    else
    {
        width = before.Nodes.Count();
        height = 1;
        chSize = 1;
    }

    var xSize = (int)Math.Ceiling((width - poolingSize) / (double)stride);
    var ySize = (int)Math.Ceiling((height - poolingSize) / (double)stride);

    this.OutputWidth = xSize;
    this.OutputHeight = ySize;
    this.OutputCh = chSize;

    this.Nodes = (
        from filterIndex in Enumerable.Range(0, chSize)
        let filterNodes = before.Nodes.Skip(filterIndex * height * width).Take(height * width).ToArray()
        from y in Enumerable.Range(0, height - poolingSize).Where(i => i % stride == 0)
        from x in Enumerable.Range(0, width - poolingSize).Where(i => i % stride == 0)
        let links = filterNodes.MakeLink(width, poolingSize, x, y, (wx, wy) => Weight.Make(1.0)).ToArray()
        // Skip(1) drops the bias link that MakeLink prepends; pooling has no bias.
        let node = new Node(ActivationFunction, links.Skip(1).ToArray())
        select node).ToArray();
}
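A matching usage sketch (again an assumption, not from the library): a 2x2 max-pooling layer with stride 2 stacked on the convolution layer from the earlier sketch.

// Hypothetical usage; conv is the ConvolutionLayer from the sketch above.
var pool = new PoolingLayer(conv, (2, 2));
// pool keeps conv's channel count (OutputCh) and shrinks OutputWidth / OutputHeight
// according to the ceiling formula in the constructor.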
public static double GetRandomWeight() => 0.01 * DLF.GetRandom();