Example #1
File: Network.cs Project: benketriel/sknn
        private static Network Build(int fanIn, int inputs, params int[] layerSizes)
        {
            var layers = new List<Neuron[]>();
            var layer = new List<Neuron>();

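            // Input layer: one neuron per network input.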
            for (int i = 0; i < inputs; ++i)
            {
                layer.Add(new Neuron("L0:" + i));
            }
            layers.Add(layer.ToArray());

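            // Subsequent layers: each neuron gets fanIn incoming links from
            // randomly chosen neurons in the previous layer.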
            var layerIndex = 0;
            foreach (var size in layerSizes)
            {
                ++layerIndex;
                layer.Clear();
                for (int i = 0; i < size; ++i)
                {
                    var n = new Neuron("L" + layerIndex + ":" + i);
                    foreach (int j in Lib.Math.NChooseM(layers.Last().Length, fanIn, Hyperparams.Rand))
                    {
                        Project(layers.Last()[j], n, Hyperparams.NewLinkWeight());
                    }
                    layer.Add(n);
                }
                layers.Add(layer.ToArray());
            }

            var res = new Network { Layers = layers.ToArray() };
            res.SetNeuronLayers();
            return res;
        }
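Lib.Math.NChooseM is not shown in this excerpt; from the call site above it appears to return fanIn distinct indices in the range [0, previousLayerSize), using Hyperparams.Rand as the random source (assumed here to be a System.Random). A minimal stand-in with that assumed contract could look like the sketch below; the name and signature mirror the call site, the body is an assumption and not the project's implementation.

        // Hypothetical sketch of the assumed contract, not the sknn implementation.
        // Requires System, System.Collections.Generic and System.Linq.
        private static IEnumerable<int> NChooseM(int n, int m, Random rand)
        {
            // Yield m distinct indices drawn uniformly from 0..n-1 (fewer if m > n).
            var pool = Enumerable.Range(0, n).ToList();
            for (int k = 0; k < m && pool.Count > 0; ++k)
            {
                var pick = rand.Next(pool.Count); // uniform pick from the remaining pool
                yield return pool[pick];
                pool.RemoveAt(pick);              // remove so indices are not repeated
            }
        }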
Example #2
File: Network.cs Project: benketriel/sknn
        public static Network NewLSTM(int fanIn, bool isLastLayerRegular, int inputs, params int[] layerSizes)
        {
            var layers = new List<Neuron[]>();

            layers.Add(Enumerable.Range(0, inputs).Select(i => new Neuron("L0:" + i)).ToArray());
            //var prevSize = layers.Last().Length;

            var layerIndex = 0;
            foreach (var size in layerSizes.Take(isLastLayerRegular ? layerSizes.Length - 1 : layerSizes.Length))
            {
                ++layerIndex;
                var inGates = new List<Neuron>();
                var rememberGates = new List<Neuron>();
                var memories = new List<Neuron>();
                var outGates = new List<Neuron>();
                var results = new List<Neuron>();

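                // Build one LSTM unit per position: input gate, remember gate,
                // memory cell, output gate, and a result node that carries the
                // unit's output forward to the next layer.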
                for (int i = 0; i < size; ++i)
                {
                    var inGate = new Neuron("InGate L" + layerIndex + ":" + i);
                    var rememberGate = new Neuron("RememberGate L" + layerIndex + ":" + i);
                    var memoryCell = new Neuron("MemoryCell L" + layerIndex + ":" + i);
                    var outGate = new Neuron("OutGate L" + layerIndex + ":" + i);
                    var resultNode = new Neuron("ResultNode L" + layerIndex + ":" + i);

                    // Candidate inputs, sorted ascending by fan-out so the nodes at or
                    // below the average fan-out form a prefix of the list.
                    var available = layers.Last().OrderBy(n => n.FanOut()).ToList();
                    var fanSum = available.Sum(x => x.FanOut());

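                        // Draw up to fanIn distinct input neurons, each time picking
                        // uniformly among the candidates whose fan-out is at most the
                        // current average, to keep outgoing links balanced.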
                    for (int j = 0; j < fanIn && available.Any(); ++j)
                    {
                        var avgFan = (double)fanSum / available.Count;
                        var until = available.FindLastIndex(x => x.FanOut() <= avgFan);
                        var inputNode = available[Hyperparams.Rand.Next(until + 1)];
                        available.Remove(inputNode);
                        fanSum -= inputNode.FanOut();

                        Project(inputNode, resultNode, Hyperparams.NewLinkWeight());
                        Project(inputNode, inGate, Hyperparams.NewLinkWeight());
                        Project(inputNode, rememberGate, Hyperparams.NewLinkWeight());
                        Project(inputNode, outGate, Hyperparams.NewLinkWeight());
                        Gate(inputNode, memoryCell, inGate, Hyperparams.NewLinkWeight());
                    }

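                    // The out gate gates the memory cell's link to the result node;
                    // the remember gate controls what the cell retains between steps.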
                    Gate(memoryCell, resultNode, outGate, Hyperparams.NewLinkWeight());
                    Remember(memoryCell, rememberGate);

                    //Peepholes
                    //Project(memoryCell, inGate, Hyperparams.NewLinkWeight());
                    //Project(memoryCell, rememberGate, Hyperparams.NewLinkWeight());
                    //Project(memoryCell, outGate, Hyperparams.NewLinkWeight());

                    inGates.Add(inGate);
                    outGates.Add(outGate);
                    rememberGates.Add(rememberGate);
                    memories.Add(memoryCell);
                    results.Add(resultNode);
                }

                //foreach(var m1 in memories)
                //{
                //    foreach(var m2 in memories)
                //    {
                //        if (m1 == m2) continue;

                //        Project(m1, m2, Hyperparams.NewLinkWeight());
                //    }
                //}

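                // Register the new neurons as three layers: all gates, then the
                // memory cells, then the result nodes.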
                layers.Add(inGates.Union(rememberGates).Union(outGates).ToArray());
                layers.Add(memories.ToArray());
                layers.Add(results.ToArray());
            }

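            // Optionally finish with a regular (non-LSTM) output layer, wired the
            // same way as in Build above.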
            if (isLastLayerRegular)
            {
                ++layerIndex;
                var layer = new List<Neuron>();
                for (int i = 0; i < layerSizes.Last(); ++i)
                {
                    var n = new Neuron("OutL" + layerIndex + ":" + i);
                    foreach (int j in Lib.Math.NChooseM(layers.Last().Length, fanIn, Hyperparams.Rand))
                    {
                        Project(layers.Last()[j], n, Hyperparams.NewLinkWeight());
                    }
                    layer.Add(n);
                }
                layers.Add(layer.ToArray());
            }

            var res = new Network { Layers = layers.ToArray() };
            res.SetNeuronLayers();
            return res;
        }
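For orientation, a hypothetical call site is sketched below; the fan-in and layer sizes are made-up values, and only the NewLSTM signature comes from the excerpt above.

        // Hypothetical usage: 16 inputs, two LSTM blocks of 32 units each, then a
        // regular output layer of 4 neurons; every new neuron samples up to 8 inputs.
        var net = Network.NewLSTM(8, true, 16, 32, 32, 4);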