/// <summary>
/// Builds a plain feed-forward network: an input layer of <paramref name="inputs"/> neurons
/// followed by one layer per entry in <paramref name="layerSizes"/>. Each neuron in a hidden
/// layer receives <paramref name="fanIn"/> randomly chosen projections from the previous layer.
/// </summary>
/// <param name="fanIn">Number of incoming links per neuron, drawn without replacement.</param>
/// <param name="inputs">Size of the input layer (named "L0:i").</param>
/// <param name="layerSizes">Sizes of the subsequent layers (named "Ld:i").</param>
/// <returns>The assembled <see cref="Network"/> with its neuron-layer indices set.</returns>
private static Network Build(int fanIn, int inputs, params int[] layerSizes)
{
    // Layer 0: bare input neurons, no incoming links.
    var allLayers = new List<Neuron[]>
    {
        Enumerable.Range(0, inputs).Select(i => new Neuron("L0:" + i)).ToArray()
    };

    for (int depth = 1; depth <= layerSizes.Length; ++depth)
    {
        var previous = allLayers[allLayers.Count - 1];
        var current = new Neuron[layerSizes[depth - 1]];

        for (int i = 0; i < current.Length; ++i)
        {
            var neuron = new Neuron("L" + depth + ":" + i);
            // Wire fanIn distinct predecessors, chosen uniformly at random.
            foreach (int pick in Lib.Math.NChooseM(previous.Length, fanIn, Hyperparams.Rand))
            {
                Project(previous[pick], neuron, Hyperparams.NewLinkWeight());
            }
            current[i] = neuron;
        }

        allLayers.Add(current);
    }

    var network = new Network { Layers = allLayers.ToArray() };
    network.SetNeuronLayers();
    return network;
}
/// <summary>
/// Renders the network as a Neo4j graph: emits one Cypher CREATE statement describing every
/// neuron and link, then executes it against a local bolt endpoint. Neurons are labeled by
/// role (gate / remember / memory / plain) and links are bucketed into weight-magnitude
/// classes (Link0..Link7, "Neg" prefix for negative weights) for styled display.
/// NOTE(review): hard-coded credentials ("neo4j"/"Aa1234567") — move to configuration.
/// </summary>
public static void PrintGraph(Network ntwk)
{
    var ns = ntwk.AllNeurons().ToList();
    var ls = ntwk.AllLinks().ToList();
    // NOTE(review): if the network has no links (or all weights are 0.0) this is 0 and the
    // ratio below divides by zero — verify AllLinks() is always non-empty here.
    var maxWeight = ls.Max(l => Math.Abs(l.Weight));
    var s = "CREATE ";
    var ss = new List<string>();
    // Drop the pseudo-neurons; bias is rendered as a property, NoGate is a sentinel.
    // NOTE(review): links whose From is a removed neuron make ns.IndexOf return -1 below,
    // producing an invalid "n-1" node reference (bias is skipped explicitly, NoGate is not).
    ns.RemoveAll(n => n.Name == Neuron.NoGate.Name);
    ns.RemoveAll(n => n.Name == Neuron.BiasNeuron.Name);
    foreach (var n in ns)
    {
        // Classify the neuron by its outgoing role; SelfLink.Weight != 0 marks a memory cell.
        var lbl = n.OutGate.Any() ? "GateNeuron" : n.OutRemember.Any() ? "RememberNeuron" : n.SelfLink.Weight != 0.0 ? "MemoryNeuron" : "Neuron";
        // Bias is stored as an ordinary incoming projection from the shared BiasNeuron.
        var biasL = n.InProject.FirstOrDefault(l => l.From == Neuron.BiasNeuron);
        var bias = null == biasL ? 0.0 : biasL.Weight;
        ss.Add("(n" + ns.IndexOf(n) + ":" + lbl + " {Name:'" + n.Name + "', State:'" + n.State + "', Bias:'" + bias + "', Activation:'" + n.Activation + "', Gate:'" + n.GateActivation + "'})");
        // Gated self-links (remember links) are not in AllLinks(); append them so the
        // link pass below renders them. Safe: ls is only enumerated after this loop.
        if (n.SelfLink.Gate.Name != Neuron.NoGate.Name) { ls.Add(n.SelfLink); }
    }
    foreach (var l in ls)
    {
        // Bias contributions were already folded into the node's Bias property.
        if (l.From.Name == Neuron.BiasNeuron.Name) continue;
        // Bucket |weight|/max into Link0..Link7.
        // NOTE(review): the ratio is always <= 1.0, so the 6.4 / 3.2 / 1.6 branches are
        // unreachable (Link5..Link7 never emitted) — thresholds look like they were meant
        // to be 0.64 / 0.32 / 0.16 or similar; confirm intent before changing.
        var lbl = "";
        if (Math.Abs(l.Weight) / maxWeight > 6.4) { lbl = "Link7"; }
        else if (Math.Abs(l.Weight) / maxWeight > 3.2) { lbl = "Link6"; }
        else if (Math.Abs(l.Weight) / maxWeight > 1.6) { lbl = "Link5"; }
        else if (Math.Abs(l.Weight) / maxWeight > 0.8) { lbl = "Link4"; }
        else if (Math.Abs(l.Weight) / maxWeight > 0.4) { lbl = "Link3"; }
        else if (Math.Abs(l.Weight) / maxWeight > 0.2) { lbl = "Link2"; }
        else if (Math.Abs(l.Weight) / maxWeight > 0.1) { lbl = "Link1"; }
        else { lbl = "Link0"; }
        if (l.Weight < 0) { lbl = "Neg" + lbl; }
        if (l.Gate.Name == Neuron.NoGate.Name)
        {
            // Ungated link: plain weighted edge From -> To.
            ss.Add("(n" + ns.IndexOf(l.From) + ")-[:" + lbl + " {Weight:'" + l.Weight + "'}]->(n" + ns.IndexOf(l.To) + ")");
        }
        else if (l.To == l.From)
        {
            // Gated self-link: drawn as a RememberLink from the gate onto the memory cell.
            ss.Add("(n" + ns.IndexOf(l.Gate) + ")-[:RememberLink {Weight:'" + l.Weight + "'}]->(n" + ns.IndexOf(l.To) + ")");
        }
        else
        {
            // Gated link between distinct neurons: materialize an intermediate Gate node
            // so both the data path (From -> g -> To) and the gating path (Gate -> g) show.
            ss.Add("(g" + ls.IndexOf(l) + ":Gate)");
            ss.Add("(n" + ns.IndexOf(l.From) + ")-[:Link]->(g" + ls.IndexOf(l) + ")");
            ss.Add("(n" + ns.IndexOf(l.Gate) + ")-[:GateLink]->(g" + ls.IndexOf(l) + ")");
            ss.Add("(g" + ls.IndexOf(l) + ")-[:" + lbl + " {Weight:'" + l.Weight + "'}]->(n" + ns.IndexOf(l.To) + ")");
        }
    }
    s += string.Join(", \n", ss);
    /*
     * Example of the generated statement shape:
     * create
     * (x0:Neuron {State:'1.2345457568', Activation:'0.957983745', Gate:'0.34857638576345'}),
     * (x1:Neuron {State:'1.2345457568', Activation:'0.957983745', Gate:'0.34857638576345'}),
     * (y0:Link {weight:'-0.82345457568'}),
     * (x0)-[:Flow]->(y0)-[:Flow]->(x1)
     */
    using (var driver = GraphDatabase.Driver("bolt://localhost", AuthTokens.Basic("neo4j", "Aa1234567")))
    using (var session = driver.Session())
    {
        session.Run(s);
        //var result = session.Run("MATCH (a:Person) WHERE a.name = 'Arthur' RETURN a.name AS name, a.title AS title");
        //foreach (var record in result)
        //{
        //    Console.WriteLine($"{record["title"].As<string>()} {record["name"].As<string>()}");
        //}
    }
}
/// <summary>
/// Builds an LSTM-style network. Each logical layer in <paramref name="layerSizes"/> expands
/// into three physical layers: (gates: in/remember/out), (memory cells), (result nodes).
/// Inputs to each cell are drawn from the previous layer with a fan-out-balancing heuristic
/// that prefers neurons whose fan-out is at or below the current average.
/// </summary>
/// <param name="fanIn">Incoming projections per cell, drawn without replacement.</param>
/// <param name="isLastLayerRegular">
/// When true, the final entry of <paramref name="layerSizes"/> becomes a plain (non-LSTM)
/// output layer wired with uniform random fan-in. Requires a non-empty layerSizes.
/// </param>
/// <param name="inputs">Size of the input layer ("L0:i").</param>
/// <param name="layerSizes">Logical layer sizes.</param>
/// <returns>The assembled <see cref="Network"/> with neuron-layer indices set.</returns>
public static Network NewLSTM(int fanIn, bool isLastLayerRegular, int inputs, params int[] layerSizes)
{
    var layers = new List<Neuron[]>();
    layers.Add(Enumerable.Range(0, inputs).Select(i => new Neuron("L0:" + i)).ToArray());
    var layerIndex = 0;
    foreach (var size in layerSizes.Take(isLastLayerRegular ? layerSizes.Length - 1 : layerSizes.Length))
    {
        ++layerIndex;
        var inGates = new List<Neuron>();
        var rememberGates = new List<Neuron>();
        var memories = new List<Neuron>();
        var outGates = new List<Neuron>();
        var results = new List<Neuron>();
        for (int i = 0; i < size; ++i)
        {
            var inGate = new Neuron("InGate L" + layerIndex + ":" + i);
            var rememberGate = new Neuron("RememberGate L" + layerIndex + ":" + i);
            var memoryCell = new Neuron("MemoryCell L" + layerIndex + ":" + i);
            var outGate = new Neuron("OutGate L" + layerIndex + ":" + i);
            var resultNode = new Neuron("ResultNode L" + layerIndex + ":" + i);

            // BUG FIX: the original called available.OrderBy(...) and discarded the result
            // (LINQ OrderBy is deferred and never sorts in place), so FindLastIndex below ran
            // against an unsorted list. Materialize the ascending-by-fan-out ordering.
            var available = layers.Last().OrderBy(n => n.FanOut()).ToList();
            var fanSum = available.Sum(x => x.FanOut());
            for (int j = 0; j < fanIn && available.Any(); ++j)
            {
                // Pick uniformly among candidates whose fan-out is <= the running average;
                // since the list is sorted ascending, those occupy the prefix [0, until].
                var avgFan = (double)fanSum / available.Count;
                var until = available.FindLastIndex(x => x.FanOut() <= avgFan);
                var inputNode = available[Hyperparams.Rand.Next(until + 1)];
                available.Remove(inputNode);
                fanSum -= inputNode.FanOut();
                // The chosen input feeds the result node and all three gates directly,
                // and feeds the memory cell through the input gate.
                Project(inputNode, resultNode, Hyperparams.NewLinkWeight());
                Project(inputNode, inGate, Hyperparams.NewLinkWeight());
                Project(inputNode, rememberGate, Hyperparams.NewLinkWeight());
                Project(inputNode, outGate, Hyperparams.NewLinkWeight());
                Gate(inputNode, memoryCell, inGate, Hyperparams.NewLinkWeight());
            }
            // Output gate controls what the cell exposes; remember gate controls retention.
            Gate(memoryCell, resultNode, outGate, Hyperparams.NewLinkWeight());
            Remember(memoryCell, rememberGate);
            //Peepholes
            //Project(memoryCell, inGate, Hyperparams.NewLinkWeight());
            //Project(memoryCell, rememberGate, Hyperparams.NewLinkWeight());
            //Project(memoryCell, outGate, Hyperparams.NewLinkWeight());
            inGates.Add(inGate);
            outGates.Add(outGate);
            rememberGates.Add(rememberGate);
            memories.Add(memoryCell);
            results.Add(resultNode);
        }
        //foreach(var m1 in memories)
        //{
        //    foreach(var m2 in memories)
        //    {
        //        if (m1 == m2) continue;
        //        Project(m1, m2, Hyperparams.NewLinkWeight());
        //    }
        //}
        // One logical LSTM layer = three physical layers, evaluated in this order.
        layers.Add(inGates.Union(rememberGates).Union(outGates).ToArray());
        layers.Add(memories.ToArray());
        layers.Add(results.ToArray());
    }
    if (isLastLayerRegular)
    {
        // Plain output layer with uniform random fan-in from the last result layer.
        // NOTE(review): layerSizes.Last() throws if layerSizes is empty — confirm callers
        // never pass isLastLayerRegular=true with no layer sizes.
        ++layerIndex;
        var layer = new List<Neuron>();
        for (int i = 0; i < layerSizes.Last(); ++i)
        {
            var n = new Neuron("OutL" + layerIndex + ":" + i);
            foreach (int j in Lib.Math.NChooseM(layers.Last().Length, fanIn, Hyperparams.Rand))
            {
                Project(layers.Last()[j], n, Hyperparams.NewLinkWeight());
            }
            layer.Add(n);
        }
        layers.Add(layer.ToArray());
    }
    var res = new Network { Layers = layers.ToArray() };
    res.SetNeuronLayers();
    return res;
}