示例#1
0
        /// <summary>
        /// Creates a weighted connection from <paramref name="from"/> to
        /// <paramref name="to"/> whose signal is modulated by <paramref name="gate"/>.
        /// Pass <c>Neuron.NoGate</c> for an ungated (plain) connection.
        /// </summary>
        public static void Gate(Neuron from, Neuron to, Neuron gate, double weight)
        {
            var link = new NeuronLink
            {
                From = from,
                To = to,
                Weight = weight,
                Gate = gate,
            };

            // Register the link on both endpoints.
            from.OutProject.Add(link);
            to.InProject.Add(link);

            // Only a real gate neuron keeps track of the links it modulates;
            // the NoGate sentinel does not.
            if (gate != Neuron.NoGate)
            {
                gate.OutGate.Add(link);
            }
        }
示例#2
0
文件: Neuron.cs 项目: benketriel/sknn
 /// <summary>
 /// Severs every connection running from <paramref name="from"/> into
 /// <paramref name="to"/> by filtering the links out of both endpoints' arrays.
 /// </summary>
 public static void Disconnect(Neuron from, Neuron to)
 {
     from.OutLinks = from.OutLinks
         .Where(link => link.To != to)
         .ToArray();
     to.InLinks = to.InLinks
         .Where(link => link.From != from)
         .ToArray();
 }
示例#3
0
文件: Neuron.cs 项目: benketriel/sknn
 /// <summary>
 /// Connects <paramref name="from"/> to <paramref name="to"/> with a fresh link
 /// whose weight matrix is zero-initialized and sized by the two neurons'
 /// dimensions, appending the link to both endpoints.
 /// </summary>
 public static void Connect(Neuron from, Neuron to)
 {
     var link = new NeuronLink
     {
         From = from,
         To = to,
         Weights = new double[from.Size, to.Size],
     };
     from.OutLinks = from.OutLinks.Union(new[] { link }).ToArray();
     to.InLinks = to.InLinks.Union(new[] { link }).ToArray();
 }
示例#4
0
        /// <summary>
        /// Constructs a feed-forward network: an input layer of
        /// <paramref name="inputs"/> neurons followed by one layer per entry in
        /// <paramref name="layerSizes"/>. Each new neuron receives projections from
        /// <paramref name="fanIn"/> randomly chosen neurons of the previous layer.
        /// </summary>
        private static Network Build(int fanIn, int inputs, params int[] layerSizes)
        {
            var layers = new List<Neuron[]>
            {
                Enumerable.Range(0, inputs).Select(i => new Neuron("L0:" + i)).ToArray(),
            };

            var layerIndex = 0;
            foreach (var size in layerSizes)
            {
                ++layerIndex;
                var previous = layers.Last();
                var current = new List<Neuron>();
                for (int i = 0; i < size; ++i)
                {
                    var neuron = new Neuron("L" + layerIndex + ":" + i);
                    // Wire up fanIn distinct random inputs from the previous layer.
                    foreach (int j in Lib.Math.NChooseM(previous.Length, fanIn, Hyperparams.Rand))
                    {
                        Project(previous[j], neuron, Hyperparams.NewLinkWeight());
                    }
                    current.Add(neuron);
                }
                layers.Add(current.ToArray());
            }

            var network = new Network { Layers = layers.ToArray() };
            network.SetNeuronLayers();
            return network;
        }
示例#5
0
        /// <summary>
        /// Removes a neuron from the network along with every link that touches it,
        /// then drops any layer left empty. The bias and no-gate sentinel neurons
        /// are permanent and cannot be removed.
        /// </summary>
        public void RemoveNeuron(Neuron n)
        {
            if (n.Name == Neuron.BiasNeuron.Name || n.Name == Neuron.NoGate.Name) return;

            Console.WriteLine($" ### Removed neuron ({n.Name}) - Fan: {n.FanIn()}x{n.FanOut()} ###");

            // Detach every link in which this neuron participates, including
            // its recurrent self-link.
            foreach (var link in n.InProject.Union(n.OutProject).Union(n.OutGate).Union(n.OutRemember).ToList())
            {
                RemoveLink(link);
            }
            RemoveLink(n.SelfLink);

            // Strip the neuron out of whichever layer holds it, then discard
            // layers that became empty and refresh the cached layer indices.
            for (int i = 0; i < Layers.Length; ++i)
            {
                Layers[i] = Layers[i].Except(new[] { n }).ToArray();
            }
            Layers = Layers.Where(layer => layer.Length > 0).ToArray();
            SetNeuronLayers();
        }
示例#6
0
        /// <summary>
        /// Builds a network of stacked LSTM-style blocks. Each entry in
        /// <paramref name="layerSizes"/> produces a block of memory cells, each with
        /// its own input, remember (forget) and output gates plus a result node.
        /// When <paramref name="isLastLayerRegular"/> is true the final entry
        /// instead becomes a plain layer of ordinary neurons.
        /// </summary>
        /// <param name="fanIn">Number of previous-layer neurons projected into each new cell.</param>
        /// <param name="isLastLayerRegular">Whether the last layer is a regular (non-LSTM) layer.</param>
        /// <param name="inputs">Number of input neurons in layer 0.</param>
        /// <param name="layerSizes">Sizes of the subsequent layers.</param>
        public static Network NewLSTM(int fanIn, bool isLastLayerRegular, int inputs, params int[] layerSizes)
        {
            var layers = new List<Neuron[]>();

            layers.Add(Enumerable.Range(0, inputs).Select(i => new Neuron("L0:" + i)).ToArray());
            //var prevSize = layers.Last().Length;

            var layerIndex = 0;
            foreach (var size in layerSizes.Take(isLastLayerRegular ? layerSizes.Length - 1 : layerSizes.Length))
            {
                ++layerIndex;
                var inGates = new List<Neuron>();
                var rememberGates = new List<Neuron>();
                var memories = new List<Neuron>();
                var outGates = new List<Neuron>();
                var results = new List<Neuron>();

                for (int i = 0; i < size; ++i)
                {
                    var inGate = new Neuron("InGate L" + layerIndex + ":" + i);
                    var rememberGate = new Neuron("RememberGate L" + layerIndex + ":" + i);
                    var memoryCell = new Neuron("MemoryCell L" + layerIndex + ":" + i);
                    var outGate = new Neuron("OutGate L" + layerIndex + ":" + i);
                    var resultNode = new Neuron("ResultNode L" + layerIndex + ":" + i);

                    // Candidate inputs must be sorted ascending by fan-out so the
                    // balanced selection below can favor under-used neurons.
                    // BUG FIX: LINQ OrderBy is deferred and not in-place; the old
                    // code discarded its result, so 'available' was never sorted
                    // and FindLastIndex did not delimit the intended prefix.
                    // (The sibling NewRandomNeuron already does this correctly.)
                    var available = layers.Last().OrderBy(n => n.FanOut()).ToList();
                    var fanSum = available.Sum(x => x.FanOut());

                    for (int j = 0; j < fanIn && available.Any(); ++j)
                    {
                        // Pick uniformly among neurons whose fan-out is at or below
                        // the running average, spreading connections evenly.
                        var avgFan = (double)fanSum / available.Count;
                        var until = available.FindLastIndex(x => x.FanOut() <= avgFan);
                        var inputNode = available[Hyperparams.Rand.Next(until + 1)];
                        available.Remove(inputNode);
                        fanSum -= inputNode.FanOut();

                        Project(inputNode, resultNode, Hyperparams.NewLinkWeight());
                        Project(inputNode, inGate, Hyperparams.NewLinkWeight());
                        Project(inputNode, rememberGate, Hyperparams.NewLinkWeight());
                        Project(inputNode, outGate, Hyperparams.NewLinkWeight());
                        Gate(inputNode, memoryCell, inGate, Hyperparams.NewLinkWeight());
                    }

                    // The output gate modulates what the memory cell exposes; the
                    // remember gate controls the cell's self-recurrence.
                    Gate(memoryCell, resultNode, outGate, Hyperparams.NewLinkWeight());
                    Remember(memoryCell, rememberGate);

                    //Peepholes
                    //Project(memoryCell, inGate, Hyperparams.NewLinkWeight());
                    //Project(memoryCell, rememberGate, Hyperparams.NewLinkWeight());
                    //Project(memoryCell, outGate, Hyperparams.NewLinkWeight());

                    inGates.Add(inGate);
                    outGates.Add(outGate);
                    rememberGates.Add(rememberGate);
                    memories.Add(memoryCell);
                    results.Add(resultNode);
                }

                //foreach(var m1 in memories)
                //{
                //    foreach(var m2 in memories)
                //    {
                //        if (m1 == m2) continue;

                //        Project(m1, m2, Hyperparams.NewLinkWeight());
                //    }
                //}

                // Gates, cells and results live in three consecutive layers so that
                // gates are evaluated before the cells they modulate.
                layers.Add(inGates.Union(rememberGates).Union(outGates).ToArray());
                layers.Add(memories.ToArray());
                layers.Add(results.ToArray());
            }

            if (isLastLayerRegular)
            {
                // Append a plain fully-random-fan layer on top of the LSTM stack.
                ++layerIndex;
                var layer = new List<Neuron>();
                for (int i = 0; i < layerSizes.Last(); ++i)
                {
                    var n = new Neuron("OutL" + layerIndex + ":" + i);
                    foreach (int j in Lib.Math.NChooseM(layers.Last().Length, fanIn, Hyperparams.Rand))
                    {
                        Project(layers.Last()[j], n, Hyperparams.NewLinkWeight());
                    }
                    layer.Add(n);
                }
                layers.Add(layer.ToArray());
            }

            var res = new Network { Layers = layers.ToArray() };
            res.SetNeuronLayers();
            return res;
        }
示例#7
0
        /// <summary>
        /// Inserts a new randomly-wired neuron into the network — or, when
        /// <paramref name="withSelfLink"/> is true, a full LSTM block of five
        /// neurons (gates, memory cell, result node). Up to <paramref name="fanIn"/>
        /// input connections are drawn from existing neurons, favoring those with
        /// low fan-out, and every new neuron is projected into the output layer.
        /// </summary>
        public void NewRandomNeuron(bool withSelfLink, int fanIn = int.MaxValue)
        {
            // Candidate input neurons: exclude the fixed bias / no-gate sentinels,
            // the output layer, and neurons with no outgoing projections.
            var candidInputs = AllNeurons().ToList();
            candidInputs.RemoveAll(n => n.Name == Neuron.BiasNeuron.Name);
            candidInputs.RemoveAll(n => n.Name == Neuron.NoGate.Name);
            candidInputs.RemoveAll(n => n.Layer == Layers.Length - 1);
            // NOTE(review): this filter keeps only neurons whose self-link is gated
            // (i.e. memory cells) as candidates — confirm that restriction is intended.
            candidInputs.RemoveAll(n => n.SelfLink.Gate.Name == Neuron.NoGate.Name);
            candidInputs.RemoveAll(n => n.OutProject.Count == 0);

            fanIn = Math.Min(candidInputs.Count, fanIn);

            var projIn = new List<Neuron>();

            // Sort ascending by fan-out so selection favors under-used neurons.
            candidInputs = candidInputs.OrderBy(n => n.FanOut()).ToList();
            var fanSum = candidInputs.Sum(x => x.FanOut());

            for (int j = 0; j < fanIn && candidInputs.Any(); ++j)
            {
                // Pick uniformly among neurons at or below the running average fan-out.
                var avgFan = (double)fanSum / candidInputs.Count;
                var until = candidInputs.FindLastIndex(x => x.FanOut() <= avgFan);
                var inputNode = candidInputs[Hyperparams.Rand.Next(until + 1)];
                candidInputs.Remove(inputNode);
                fanSum -= inputNode.FanOut();
                projIn.Add(inputNode);
            }

            // BUG FIX: when every candidate was filtered out, projIn is empty and
            // projIn.Max(...) below would throw InvalidOperationException on the
            // empty sequence. Nothing can be wired, so bail out.
            if (projIn.Count == 0) return;

            // Place the new neuron(s) just above the deepest selected input.
            var baseLayer = projIn.Max(n => n.Layer) + 1;

            var newNeurons = new List<Neuron>();

            if (withSelfLink)
            {
                var inGate = new Neuron("InGate L" + baseLayer + ":R") { Layer = baseLayer };
                var rememberGate = new Neuron("RememberGate L" + baseLayer + ":R") { Layer = baseLayer };
                var memoryCell = new Neuron("MemoryCell L" + (baseLayer + 1) + ":R") { Layer = baseLayer + 1 };
                var outGate = new Neuron("OutGate L" + baseLayer + ":R") { Layer = baseLayer };
                var resultNode = new Neuron("ResultNode L" + (baseLayer + 2) + ":R") { Layer = baseLayer + 2 };

                foreach(var inputNode in projIn)
                {
                    Project(inputNode, resultNode, Hyperparams.NewLinkWeight());
                    Project(inputNode, inGate, Hyperparams.NewLinkWeight());
                    Project(inputNode, rememberGate, Hyperparams.NewLinkWeight());
                    Project(inputNode, outGate, Hyperparams.NewLinkWeight());
                    Gate(inputNode, memoryCell, inGate, Hyperparams.NewLinkWeight());
                }

                // Output gate modulates the cell's exposure; remember gate
                // controls its self-recurrence.
                Gate(memoryCell, resultNode, outGate, Hyperparams.NewLinkWeight());
                Remember(memoryCell, rememberGate);

                newNeurons.Add(inGate);
                newNeurons.Add(outGate);
                newNeurons.Add(rememberGate);
                newNeurons.Add(memoryCell);
                newNeurons.Add(resultNode);
            }
            else
            {
                var newNeur = new Neuron("R" + candidInputs.Count) { Layer = baseLayer };

                foreach (var pin in projIn)
                {
                    Project(pin, newNeur, Hyperparams.NewLinkWeight());
                }

                newNeurons.Add(newNeur);
            }

            foreach (var n in newNeurons)
            {
                // Every new neuron feeds the output layer.
                foreach (var pout in Layers.Last())
                {
                    Project(n, pout, Hyperparams.NewLinkWeight());
                }

                if (n.Layer == Layers.Length - 1)
                {
                    // The neuron would collide with the output layer's index:
                    // insert a fresh layer just below the outputs to hold it.
                    var ll = Layers.Take(Layers.Length - 1).ToList();
                    ll.Add(new Neuron[] { n });
                    ll.Add(Layers.Last());
                    Layers = ll.ToArray();
                }
                else
                {
                    Layers[n.Layer] = Layers[n.Layer].Union(new Neuron[] { n }).ToArray();
                }
            }

            SetNeuronLayers();
        }
示例#8
0
 /// <summary>
 /// Turns the target's self-link into a gated memory connection: unit weight so
 /// the state carries over unchanged, modulated by <paramref name="gate"/>.
 /// </summary>
 public static void Remember(Neuron target, Neuron gate)
 {
     var selfLink = target.SelfLink;
     selfLink.Weight = 1;
     selfLink.Gate = gate;
     gate.OutRemember.Add(selfLink);
 }
示例#9
0
 /// <summary>
 /// Creates a plain weighted connection — simply a <see cref="Gate"/> call with
 /// the <c>Neuron.NoGate</c> sentinel, i.e. an ungated link.
 /// </summary>
 public static void Project(Neuron from, Neuron to, double weight) =>
     Gate(from, to, Neuron.NoGate, weight);