Example #1
        public CommunicationVisualizer(SharpNeatLib.CPPNs.SubstrateDescription _sd, ModularNetwork _net)
        {
            InitializeComponent();
            //zlevel = _zlevel;

            drawFont  = new System.Drawing.Font("Arial", 8);
            drawBrush = new System.Drawing.SolidBrush(System.Drawing.Color.Black);

            drawFormat = new System.Drawing.StringFormat();

            sd = _sd;

            net = _net;
            _net.UpdateNetworkEvent += networkUpdated;
            activation = new activationLevels[200];
            //  outgoingActivation = new List<float>[200];

            for (int i = 0; i < activation.Length; i++)
            {
                activation[i] = new activationLevels();
                //outgoingActivation[i] = new List<float>();
            }
            penConnection = new Pen(Color.Black);
            penRed        = new Pen(Color.Red);

            this.SetStyle(
                ControlStyles.AllPaintingInWmPaint |
                ControlStyles.UserPaint |
                ControlStyles.DoubleBuffer, true);
        }
 public NetworkDifferencesForm(ModularNetwork _net, List <Robot> _robots)
 {
     InitializeComponent();
     net       = _net;
     robots    = _robots;
     saveAsEPS = false;
     this.Text = "Network Difference Viewer";
     SetBounds(1, 1, 640, 480);
     brush         = new SolidBrush(Color.Red);
     penConnection = new Pen(Color.Black);
     panel1.Width  = (int)(_robots.Count * incX) + 100;
     panel1.Height = (int)(_robots.Count * incY) + 100;
 }
Example #3
        public ModuleUsageVisualizerForm(Robot _selectedRobot, ModularNetwork _net, int _evaluationTime)
        {
            evaluationTime           = _evaluationTime;
            _net.UpdateNetworkEvent += networkUpdated;
            InitializeComponent();
            net           = _net;
            selectedRobot = _selectedRobot;
            this.Text     = "Module Usage Visualizer [z=" + selectedRobot.zstack + "]";
            SetBounds(1, 1, 320, 90); // Schrum: Need to tweak?

            //set up double buffering
            this.SetStyle(
                ControlStyles.AllPaintingInWmPaint |
                ControlStyles.UserPaint |
                ControlStyles.DoubleBuffer, true);
        }
        public NetworkVisualizerForm(Robot _selectedRobot, ModularNetwork _net)
        {
            _net.UpdateNetworkEvent += networkUpdated;
            InitializeComponent();
            net           = _net;
            selectedRobot = _selectedRobot;
            this.Text     = "Network Visualizer [z=" + selectedRobot.zstack + "]";
            SetBounds(1, 1, 320, 320);
            brush         = new SolidBrush(Color.Red);
            penConnection = new Pen(Color.Black);
            startX        = 1.1f * dtx;
            startY        = 1.1f * dty;

            //set up double buffering
            this.SetStyle(
                ControlStyles.AllPaintingInWmPaint |
                ControlStyles.UserPaint |
                ControlStyles.DoubleBuffer, true);
        }
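A usage sketch (not part of the original listing): assuming the hosting simulator already has a `Robot` instance `robot` and a decoded `ModularNetwork` `net`, the visualizer form is typically opened non-modally so it can repaint on each UpdateNetworkEvent.

        // Illustration only: 'robot' and 'net' are assumed to exist in the calling simulator code.
        NetworkVisualizerForm visualizerForm = new NetworkVisualizerForm(robot, net);
        visualizerForm.Show();   // non-modal, so the simulation keeps running and triggers repaints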
Example #5
            public Rect(float fixedx, float fixedy, bool from, Rect _parent, byte recPos, float _x1, float _y1, float _x2, float _y2, INetwork genome)
            {
                coordinates = new float[4];

                this.genome = (ModularNetwork)genome;
                this.fixedx = fixedx;
                this.fixedy = fixedy;
                this.recPos = recPos;
                this.from   = from;

                activationLevel = float.NaN;
                visited         = false;
                childs          = new List <Rect>();
                parent          = _parent;

                if (parent != null)
                {
                    parent.childs.Add(this);
                }

                if (_x1 < _x2)
                {
                    x1 = _x1;
                    x2 = _x2;
                }
                else
                {
                    x2 = _x1;
                    x1 = _x2;
                }

                if (_y1 < _y2)
                {
                    y1 = _y1;
                    y2 = _y2;
                }
                else
                {
                    y2 = _y1;
                    y1 = _y2;
                }
            }
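A hedged construction sketch, assuming code running inside the same enclosing class (Rect appears to be a nested quadtree helper) with a `ModularNetwork` instance `net` in scope; it only illustrates the parent/child wiring and the coordinate swap done by the constructor.

                // Illustration only: a root rectangle covering [-1,1] x [-1,1] on the 'from' side.
                Rect root = new Rect(0.0f, 0.0f, true, null, 0, -1.0f, -1.0f, 1.0f, 1.0f, net);

                // A child passed with reversed corners; the constructor swaps them so that x1 <= x2 and
                // y1 <= y2, and the child registers itself in root.childs.
                Rect child = new Rect(0.0f, 0.0f, true, root, 1, 0.5f, 0.5f, -0.5f, -0.5f, net);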
Example #6
        //TODO not really clean
        public void clearANNSignals(float zstack)
        {
            int            index = 0;
            ModularNetwork net   = ((ModularNetwork)brain);

            foreach (ConnectionGene gene in net.genome.ConnectionGeneList)
            {
                if (gene.coordinates.Length > 4 && !gene.coordinates[4].Equals(float.NaN))
                {
                    if (gene.coordinates[4] != zstack)        //Only valid if robot has z-values
                    {
                        index++;
                        continue;
                    }
                }

                ((ModularNetwork)brain).neuronSignals[net.connections[index].targetNeuronIdx] = 0.0f;
                ((ModularNetwork)brain).neuronSignals[net.connections[index].sourceNeuronIdx] = 0.0f;
                index++; // advance to the next connection so the gene and connection indices stay in step
            }
        }
Example #7
        public TeamVisualizerForm(ModularNetwork _net, int numAgents)
        {
            _net.UpdateNetworkEvent += networkUpdated;
            InitializeComponent();
            net       = _net;
            this.Text = "Team";
            SetBounds(1, 1, 700, 400);
            brush                  = new SolidBrush(Color.Red);
            penConnection          = new Pen(Color.Black);
            penConnection.StartCap = System.Drawing.Drawing2D.LineCap.Flat;
            penConnection.EndCap   = System.Drawing.Drawing2D.LineCap.ArrowAnchor;
            startX                 = 700;
            startY                 = 150;
            zdelta                 = 1.0f / (numAgents - 1);

            //set up double buffering
            this.SetStyle(
                ControlStyles.AllPaintingInWmPaint |
                ControlStyles.UserPaint |
                ControlStyles.DoubleBuffer, true);
        }
        // Schrum: Just added the brainCounter to the old constructor
        public NetworkVisualizerForm(Robot _selectedRobot, ModularNetwork _net, int brainCounter, bool checkZ)
        {
            this.checkZ = checkZ; // Schrum: added
            //Console.WriteLine("Draw:" + brainCounter);
            _net.UpdateNetworkEvent += networkUpdated;
            InitializeComponent();
            net           = _net;
            selectedRobot = _selectedRobot;
            // Schrum: Modified this to identify the brain being accessed
            this.Text = "Network Visualizer [z=" + selectedRobot.zstack + (brainCounter != -1 ? ",s=" + brainCounter : "") + "]";
            SetBounds(1, 1, 320, 320);
            brush         = new SolidBrush(Color.Red);
            penConnection = new Pen(Color.Black);
            startX        = 1.1f * dtx;
            startY        = 1.1f * dty;

            //set up double buffering
            this.SetStyle(
                ControlStyles.AllPaintingInWmPaint |
                ControlStyles.UserPaint |
                ControlStyles.DoubleBuffer, true);
        }
Example #9
        /// <summary>
        /// Resets ANN signals to zero. Only valid if robot has z-values.
        /// </summary>
        public void clearANNSignals(float ZStack)
        {
            if (NeatBrain)
            {
                return;
            }
            int            index = 0;
            ModularNetwork net   = ((ModularNetwork)Brain);

            foreach (ConnectionGene gene in net.genome.ConnectionGeneList)
            {
                if (gene.coordinates.Length > 4 && !gene.coordinates[4].Equals(float.NaN))
                {
                    if (gene.coordinates[4] != ZStack)
                    {
                        index++;
                        continue;
                    }
                }

                ((ModularNetwork)Brain).neuronSignals[net.connections[index].targetNeuronIdx] = 0.0f;
                ((ModularNetwork)Brain).neuronSignals[net.connections[index].sourceNeuronIdx] = 0.0f;
                index++; // keep the connection index in step with the gene being examined
            }
        }
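A short call sketch, assuming a robot object `robot` that exposes this method and a `zstack` field as used in the visualizer examples above.

            // Illustration only: reset the signals of the connections on this robot's own z-layer.
            robot.clearANNSignals(robot.zstack);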
Example #10
        float[] queryCPPNOutputs(ModularNetwork genome, float x1, float y1, float x2, float y2, float maxXDist, float minYDist)
        {
            float[] coordinates = new float[genome.InputNeuronCount];

            coordinates[0] = x1;
            coordinates[1] = y1;
            coordinates[2] = x2;
            coordinates[3] = y2;
            //coordinates[4] = maxXDist;
            //coordinates[5] = minYDist;

            //Console.WriteLine("Coordinates: ({0}, {1} : {2}, {3})", x1, y1, x2, y2);
            genome.ClearSignals();
            genome.SetInputSignals(coordinates);
            genome.RecursiveActivation();

            float[] outs = new float[genome.OutputNeuronCount];
            for (int i = 0; i < genome.OutputNeuronCount; i++)
            {
                outs[i] = genome.GetOutputSignal(i);
            }

            return(outs);
        }
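A hedged sketch of the same query pattern used on its own, assuming a decoded CPPN `cppn` with at least four inputs; only calls that already appear in this listing are used, and reading output 0 as the connection weight is an assumption.

            // Illustration only: query the CPPN once for a source/target coordinate pair.
            float[] inputs = new float[cppn.InputNeuronCount];
            inputs[0] = -0.5f;  // x1
            inputs[1] = -0.5f;  // y1
            inputs[2] =  0.5f;  // x2
            inputs[3] =  0.5f;  // y2

            cppn.ClearSignals();
            cppn.SetInputSignals(inputs);
            cppn.RecursiveActivation();

            float weight = cppn.GetOutputSignal(0);  // assumed here to encode the connection weight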
Example #11
 public void networkUpdated(ModularNetwork _net)
 {
     //net = _net;
     Refresh();
 }
Example #12
 //This function gets called when the current simulated network sends an update event
 public void networkUpdated(ModularNetwork _net)
 {
     Refresh();
 }
Example #13
        static public ModularNetwork DecodeToModularNetwork(NeatGenome.NeatGenome g)
        {
            int inputCount  = g.InputNeuronCount;
            int outputCount = g.OutputNeuronCount;
            int neuronCount = g.NeuronGeneList.Count;

            IActivationFunction[] activationFunctions = new IActivationFunction[neuronCount];
            float[] biasList = new float[neuronCount];

            Dictionary <long, int> neuronLookup = new Dictionary <long, int>(neuronCount);

            // Create an array of the activation functions for each non-module node in the genome.
            // Start with a bias node if there is one in the genome.
            // The genome's neuron list is assumed to be ordered by type, with the bias node appearing first.
            int neuronGeneIndex = 0;

            for (; neuronGeneIndex < neuronCount; neuronGeneIndex++)
            {
                if (g.NeuronGeneList[neuronGeneIndex].NeuronType != NeuronType.Bias)
                {
                    break;
                }
                activationFunctions[neuronGeneIndex] = g.NeuronGeneList[neuronGeneIndex].ActivationFunction;
                neuronLookup.Add(g.NeuronGeneList[neuronGeneIndex].InnovationId, neuronGeneIndex);
            }
            int biasCount = neuronGeneIndex;

            for (; neuronGeneIndex < neuronCount; neuronGeneIndex++)
            {
                activationFunctions[neuronGeneIndex] = g.NeuronGeneList[neuronGeneIndex].ActivationFunction;
                neuronLookup.Add(g.NeuronGeneList[neuronGeneIndex].InnovationId, neuronGeneIndex);
                biasList[neuronGeneIndex] = g.NeuronGeneList[neuronGeneIndex].Bias;
            }

            // Create an array of the activation functions, inputs, and outputs for each module in the genome.
            ModulePacket[] modules = new ModulePacket[g.ModuleGeneList.Count];
            for (int i = g.ModuleGeneList.Count - 1; i >= 0; i--)
            {
                modules[i].function = g.ModuleGeneList[i].Function;
                // Must translate input and output IDs to array locations.
                modules[i].inputLocations = new int[g.ModuleGeneList[i].InputIds.Count];
                for (int j = g.ModuleGeneList[i].InputIds.Count - 1; j >= 0; j--)
                {
                    modules[i].inputLocations[j] = neuronLookup[g.ModuleGeneList[i].InputIds[j]];
                }
                modules[i].outputLocations = new int[g.ModuleGeneList[i].OutputIds.Count];
                for (int j = g.ModuleGeneList[i].OutputIds.Count - 1; j >= 0; j--)
                {
                    modules[i].outputLocations[j] = neuronLookup[g.ModuleGeneList[i].OutputIds[j]];
                }
            }

            // ConnectionGenes point to a neuron's innovation ID. Translate this ID to the neuron's index in the neuron array.
            FloatFastConnection[] connections = new FloatFastConnection[g.ConnectionGeneList.Count];
            for (int connectionGeneIndex = g.ConnectionGeneList.Count - 1; connectionGeneIndex >= 0; connectionGeneIndex--)
            {
                ConnectionGene connectionGene = g.ConnectionGeneList[connectionGeneIndex];
                connections[connectionGeneIndex].sourceNeuronIdx = neuronLookup[connectionGene.SourceNeuronId];
                connections[connectionGeneIndex].targetNeuronIdx = neuronLookup[connectionGene.TargetNeuronId];
                connections[connectionGeneIndex].weight          = (float)connectionGene.Weight;

                connections[connectionGeneIndex].learningRate  = connectionGene.learningRate;
                connections[connectionGeneIndex].A             = connectionGene.A;
                connections[connectionGeneIndex].B             = connectionGene.B;
                connections[connectionGeneIndex].C             = connectionGene.C;
                connections[connectionGeneIndex].D             = connectionGene.D;
                connections[connectionGeneIndex].modConnection = connectionGene.modConnection;
            }

            ModularNetwork mn = new ModularNetwork(biasCount, inputCount, outputCount, neuronCount, connections, biasList, activationFunctions, modules);

            if (g.networkAdaptable)
            {
                mn.adaptable = true;
            }
            if (g.networkModulatory)
            {
                mn.modulatory = true;
            }

            mn.genome = g;
            return(mn);
        }
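A decoding-and-activation sketch (not part of the original example), assuming a loaded `NeatGenome.NeatGenome` named `genome` and an update handler like the `networkUpdated` methods shown elsewhere on this page; the activation calls are the ones used by `queryCPPNOutputs` above.

            // Illustration only: decode a genome and run a single activation pass.
            ModularNetwork mn = DecodeToModularNetwork(genome);
            mn.UpdateNetworkEvent += networkUpdated;          // optional: lets a visualizer repaint on updates

            float[] inputs = new float[mn.InputNeuronCount];  // fill with sensor values as needed
            mn.ClearSignals();
            mn.SetInputSignals(inputs);
            mn.RecursiveActivation();
            float firstOutput = mn.GetOutputSignal(0);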
 //This function gets called when the current simulated network sends an update event
 public void networkUpdated(ModularNetwork _net)
 {
     //Console.WriteLine("networkUpdated");
     //net = _net;
     Refresh();
 }
 // Schrum: Old constructor, without a brain counter, simply calls new one with extra argument
 public NetworkVisualizerForm(Robot _selectedRobot, ModularNetwork _net) : this(_selectedRobot, _net, -1, false)
 {
 }
        static public ModularNetwork DecodeToModularNetwork(NeatGenome.NeatGenome g)
        {
            int inputCount  = g.InputNeuronCount;
            int outputCount = g.OutputNeuronCount;
            int neuronCount = g.NeuronGeneList.Count;

            IActivationFunction[] activationFunctions = new IActivationFunction[neuronCount];
            float[] biasList = new float[neuronCount];

            Dictionary <uint, int> neuronLookup = new Dictionary <uint, int>(neuronCount);

            // Schrum: In case there are output neurons out of order
            g.NeuronGeneList.NeuronSortCheck();

            // Create an array of the activation functions for each non-module node in the genome.
            // Start with a bias node if there is one in the genome.
            // The genome's neuron list is assumed to be ordered by type, with the bias node appearing first.
            int neuronGeneIndex = 0;

            for (; neuronGeneIndex < neuronCount; neuronGeneIndex++)
            {
                if (g.NeuronGeneList[neuronGeneIndex].NeuronType != NeuronType.Bias)
                {
                    break;
                }
                activationFunctions[neuronGeneIndex] = g.NeuronGeneList[neuronGeneIndex].ActivationFunction;
                neuronLookup.Add(g.NeuronGeneList[neuronGeneIndex].InnovationId, neuronGeneIndex);
            }
            int biasCount = neuronGeneIndex;

            // Schrum: debug
            //Console.WriteLine("start (after bias): " + g.GenomeId);

            // Schrum: Debugging
            //NeuronType expectedType = NeuronType.Input;

            for (; neuronGeneIndex < neuronCount; neuronGeneIndex++)
            {
                activationFunctions[neuronGeneIndex] = g.NeuronGeneList[neuronGeneIndex].ActivationFunction;
                // Schrum: Debug

                /*
                 * if (expectedType != g.NeuronGeneList[neuronGeneIndex].NeuronType)
                 * {
                 *  if (expectedType == NeuronType.Input && g.NeuronGeneList[neuronGeneIndex].NeuronType == NeuronType.Output)
                 *  {
                 *      expectedType = NeuronType.Output;
                 *  }
                 *  else if (expectedType == NeuronType.Output && g.NeuronGeneList[neuronGeneIndex].NeuronType == NeuronType.Hidden)
                 *  {
                 *      expectedType = NeuronType.Hidden;
                 *  }
                 *  else
                 *  {
                 *      // Error condition:
                 *      Console.WriteLine("Error with genome: " + g.GenomeId);
                 *
                 *      XmlDocument doc = new XmlDocument();
                 *      XmlGenomeWriterStatic.Write(doc, (SharpNeatLib.NeatGenome.NeatGenome)g);
                 *      FileInfo oFileInfo = new FileInfo("ProblemGenome.xml");
                 *      doc.Save(oFileInfo.FullName);
                 *
                 *      Environment.Exit(1);
                 *  }
                 * }
                 */
                neuronLookup.Add(g.NeuronGeneList[neuronGeneIndex].InnovationId, neuronGeneIndex);
                biasList[neuronGeneIndex] = g.NeuronGeneList[neuronGeneIndex].Bias;
            }

            // Create an array of the activation functions, inputs, and outputs for each module in the genome.
            ModulePacket[] modules = new ModulePacket[g.ModuleGeneList.Count];
            for (int i = g.ModuleGeneList.Count - 1; i >= 0; i--)
            {
                modules[i].function = g.ModuleGeneList[i].Function;
                // Must translate input and output IDs to array locations.
                modules[i].inputLocations = new int[g.ModuleGeneList[i].InputIds.Count];
                for (int j = g.ModuleGeneList[i].InputIds.Count - 1; j >= 0; j--)
                {
                    modules[i].inputLocations[j] = neuronLookup[g.ModuleGeneList[i].InputIds[j]];
                }
                modules[i].outputLocations = new int[g.ModuleGeneList[i].OutputIds.Count];
                for (int j = g.ModuleGeneList[i].OutputIds.Count - 1; j >= 0; j--)
                {
                    modules[i].outputLocations[j] = neuronLookup[g.ModuleGeneList[i].OutputIds[j]];
                }
            }

            // ConnectionGenes point to a neuron's innovation ID. Translate this ID to the neuron's index in the neuron array.
            FloatFastConnection[] connections = new FloatFastConnection[g.ConnectionGeneList.Count];
            for (int connectionGeneIndex = g.ConnectionGeneList.Count - 1; connectionGeneIndex >= 0; connectionGeneIndex--)
            {
                ConnectionGene connectionGene = g.ConnectionGeneList[connectionGeneIndex];
                connections[connectionGeneIndex].sourceNeuronIdx = neuronLookup[connectionGene.SourceNeuronId];
                connections[connectionGeneIndex].targetNeuronIdx = neuronLookup[connectionGene.TargetNeuronId];
                connections[connectionGeneIndex].weight          = (float)connectionGene.Weight;

                connections[connectionGeneIndex].learningRate  = connectionGene.learningRate;
                connections[connectionGeneIndex].A             = connectionGene.A;
                connections[connectionGeneIndex].B             = connectionGene.B;
                connections[connectionGeneIndex].C             = connectionGene.C;
                connections[connectionGeneIndex].D             = connectionGene.D;
                connections[connectionGeneIndex].modConnection = connectionGene.modConnection;
            }

            ModularNetwork mn = new ModularNetwork(biasCount, inputCount, outputCount, g.OutputsPerPolicy, neuronCount, connections, biasList, activationFunctions, modules);

            if (g.networkAdaptable)
            {
                mn.adaptable = true;
            }
            if (g.networkModulatory)
            {
                mn.modulatory = true;
            }

            mn.genome = g;
            return(mn);
        }
Example #17
        /*
         * The main method that generates a list of ANN connections based on the information in the
         * underlying hypercube.
         * Input : CPPN, InputPositions, OutputPositions, ES-HyperNEAT parameters
         * Output: Connections, HiddenNodes
         */
        public void generateSubstrate(List <PointF> inputNeuronPositions, List <PointF> outputNeuronPositions,
                                      INetwork genome, int initialDepth, float varianceThreshold, float bandThreshold, int ESIterations,
                                      float divisionThreshold, int maxDepth,
                                      uint inputCount, uint outputCount,
                                      ref ConnectionGeneList connections, ref List <PointF> hiddenNeurons, bool useLeo = false)
        {
            List <TempConnection> tempConnections = new List <TempConnection>();
            int  sourceIndex, targetIndex = 0;
            uint counter = 0;

            this.genome            = (ModularNetwork)genome;
            this.initialDepth      = initialDepth;
            this.maxDepth          = maxDepth;
            this.varianceThreshold = varianceThreshold;
            this.bandThrehold      = bandThreshold;
            this.divisionThreshold = divisionThreshold;

            //CONNECTIONS DIRECTLY FROM INPUT NODES
            sourceIndex = 0;
            foreach (PointF input in inputNeuronPositions)
            {
                // Analyze outgoing connectivity pattern from this input
                QuadPoint root = QuadTreeInitialisation(input.X, input.Y, true, (int)initialDepth, (int)maxDepth);
                tempConnections.Clear();
                // Traverse quadtree and add connections to list
                PruneAndExpress(input.X, input.Y, ref tempConnections, root, true, maxDepth);

                foreach (TempConnection p in tempConnections)
                {
                    PointF newp = new PointF(p.x2, p.y2);

                    targetIndex = hiddenNeurons.IndexOf(newp);
                    if (targetIndex == -1)
                    {
                        targetIndex = hiddenNeurons.Count;
                        hiddenNeurons.Add(newp);
                    }
                    connections.Add(new ConnectionGene(counter++, (sourceIndex), (targetIndex + inputCount + outputCount), p.weight * HyperNEATParameters.weightRange, new float[] { p.x1, p.y1, p.x2, p.y2 }, p.Outputs));
                }
                sourceIndex++;
            }

            tempConnections.Clear();

            List <PointF> unexploredHiddenNodes = new List <PointF>();

            unexploredHiddenNodes.AddRange(hiddenNeurons);

            for (int step = 0; step < ESIterations; step++)
            {
                foreach (PointF hiddenP in unexploredHiddenNodes)
                {
                    tempConnections.Clear();
                    QuadPoint root = QuadTreeInitialisation(hiddenP.X, hiddenP.Y, true, (int)initialDepth, (int)maxDepth);
                    PruneAndExpress(hiddenP.X, hiddenP.Y, ref tempConnections, root, true, maxDepth);

                    sourceIndex = hiddenNeurons.IndexOf(hiddenP);   //TODO there might be a computationally less expensive way

                    foreach (TempConnection p in tempConnections)
                    {
                        PointF newp = new PointF(p.x2, p.y2);

                        targetIndex = hiddenNeurons.IndexOf(newp);
                        if (targetIndex == -1)
                        {
                            targetIndex = hiddenNeurons.Count;
                            hiddenNeurons.Add(newp);
                        }
                        connections.Add(new ConnectionGene(counter++, (sourceIndex + inputCount + outputCount), (targetIndex + inputCount + outputCount), p.weight * HyperNEATParameters.weightRange, new float[] { p.x1, p.y1, p.x2, p.y2 }, p.Outputs));
                    }
                }
                // Remove the just explored nodes
                List <PointF> temp = new List <PointF>();
                temp.AddRange(hiddenNeurons);
                foreach (PointF f in unexploredHiddenNodes)
                {
                    temp.Remove(f);
                }

                unexploredHiddenNodes = temp;
            }

            tempConnections.Clear();

            //CONNECT TO OUTPUT
            targetIndex = 0;
            foreach (PointF outputPos in outputNeuronPositions)
            {
                // Analyze incoming connectivity pattern to this output
                QuadPoint root = QuadTreeInitialisation(outputPos.X, outputPos.Y, false, (int)initialDepth, (int)maxDepth);
                tempConnections.Clear();
                PruneAndExpress(outputPos.X, outputPos.Y, ref tempConnections, root, false, maxDepth);


                PointF target = new PointF(outputPos.X, outputPos.Y);

                foreach (TempConnection t in tempConnections)
                {
                    PointF source = new PointF(t.x1, t.y1);
                    sourceIndex = hiddenNeurons.IndexOf(source);

                    /* New nodes not created here because all the hidden nodes that are
                     *  connected to an input/hidden node are already expressed. */
                    if (sourceIndex != -1)  //only connect if hidden neuron already exists
                    {
                        connections.Add(new ConnectionGene(counter++, (sourceIndex + inputCount + outputCount), (targetIndex + inputCount), t.weight * HyperNEATParameters.weightRange, new float[] { t.x1, t.y1, t.x2, t.y2 }, t.Outputs));
                    }
                }
                targetIndex++;
            }
        }
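A hedged call sketch for generateSubstrate: the neuron positions and ES-HyperNEAT parameter values below are made-up placeholders, and `cppn` stands for an already-decoded `ModularNetwork`; the point is only to show how the two `ref` parameters collect the generated substrate.

            // Illustration only: two inputs, one output, placeholder ES-HyperNEAT settings.
            List<PointF> inputPositions  = new List<PointF> { new PointF(-0.5f, -1.0f), new PointF(0.5f, -1.0f) };
            List<PointF> outputPositions = new List<PointF> { new PointF(0.0f, 1.0f) };

            ConnectionGeneList connections   = new ConnectionGeneList();
            List<PointF>       hiddenNeurons = new List<PointF>();

            generateSubstrate(inputPositions, outputPositions, cppn,
                              3,      // initialDepth
                              0.03f,  // varianceThreshold
                              0.3f,   // bandThreshold
                              1,      // ESIterations
                              0.03f,  // divisionThreshold
                              3,      // maxDepth
                              (uint)inputPositions.Count, (uint)outputPositions.Count,
                              ref connections, ref hiddenNeurons);

            // 'connections' now holds the expressed ANN links; 'hiddenNeurons' holds the discovered hidden nodes.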