GetWeight() public method

Gets the weight of the connection between two neurons in adjacent layers.
public GetWeight ( int fromLayer, int fromNeuron, int toNeuron ) : double
fromLayer int The index of the source layer.
fromNeuron int The index of the source neuron.
toNeuron int The index of the target neuron.
return double
        /// <summary>
        /// Connect two freeform layers, creating one connection per
        /// (source, target) neuron pair and copying the corresponding weight
        /// from the given BasicNetwork. Used internally only.
        /// </summary>
        /// <param name="network">The BasicNetwork to copy weights from.</param>
        /// <param name="fromLayerIdx">Index of the source layer within the BasicNetwork.</param>
        /// <param name="source">The freeform source layer.</param>
        /// <param name="target">The freeform target layer.</param>
        private void ConnectLayersFromBasic(BasicNetwork network,
            int fromLayerIdx, IFreeformLayer source, IFreeformLayer target)
        {
            for (int toIdx = 0; toIdx < target.Count; toIdx++)
            {
                IFreeformNeuron toNeuron = target.Neurons[toIdx];

                // Neurons without an input summation (i.e. bias neurons)
                // receive no incoming connections.
                if (toNeuron.InputSummation == null)
                {
                    continue;
                }

                for (int fromIdx = 0; fromIdx < source.Count; fromIdx++)
                {
                    IFreeformNeuron fromNeuron = source.Neurons[fromIdx];

                    IFreeformConnection link =
                        _connectionFactory.Factor(fromNeuron, toNeuron);
                    fromNeuron.AddOutput(link);
                    toNeuron.AddInput(link);
                    link.Weight = network.GetWeight(fromLayerIdx, fromIdx, toIdx);
                }
            }
        }
Example #2
0
 /// <summary>
 /// Analyze the weights and bias values of a BasicNetwork.
 /// NOTE(review): this body is decompiler output — the goto/label control flow
 /// was reconstructed from obfuscated IL. It appears to iterate every weight
 /// level of the network, collecting regular weights, bias weights and a
 /// combined value list, and counting connections suppressed by the network's
 /// connection limit — verify against the original Encog AnalyzeNetwork
 /// source before attempting any refactor.
 /// </summary>
 /// <param name="network">The network to analyze.</param>
 public AnalyzeNetwork(BasicNetwork network)
 {
     int num3;                  // current weight-level (layer) index
     int num4;                  // neuron count of the current layer (excl. bias) — presumably
     int layerTotalNeuronCount;
     int layerNeuronCount;      // neuron count of the next layer
     int num7;                  // from-neuron index for the regular-weight pass
     int num8;                  // to-neuron index for the regular-weight pass
     double num9;               // current regular weight value
     int num10;                 // bias-neuron index (set to num4 below) — presumably
     int num11;                 // to-neuron index for the bias-weight pass
     double num12;              // current bias weight value
     int num = 0;               // count of weights under the connection limit
     int num2 = 0;              // total number of values collected
     // list appears to hold bias weights, list2 regular weights — inferred
     // from the populating code paths below; TODO confirm.
     IList<double> list = new List<double>();
     IList<double> list2 = new List<double>();
     // Opaque predicate left over from obfuscation; never taken.
     if (0 != 0)
     {
         goto Label_0115;
     }
     IList<double> values = new List<double>();
     // Always true (x | 1 != 0); starts the layer loop at num3 = 0.
     if ((((uint) num2) | 1) != 0)
     {
         num3 = 0;
         goto Label_00C9;
     }
     Label_000B:
     // Finalization: publish the collected statistics to the fields.
     this._xd16d54155d6ebc35 = new NumericRange(values);
     this._x8158512e31b17fc4 = EngineArray.ListToDouble(list2);
     this._x7cd672b98e9d2817 = EngineArray.ListToDouble(values);
     this._x5933bfade0487265 = EngineArray.ListToDouble(list);
     Label_003D:
     // (uint)x >= 0 is always true; this is effectively just "return".
     if (((uint) layerNeuronCount) >= 0)
     {
     }
     return;
     Label_0057:
     // All layers processed: store totals and build the NumericRanges.
     // The unsigned comparisons here are obfuscator noise.
     if ((((uint) num2) + ((uint) num2)) < 0)
     {
         goto Label_0317;
     }
     this._x465229d781237721 = num2;
     if (((uint) layerTotalNeuronCount) > uint.MaxValue)
     {
         goto Label_003D;
     }
     this._x2f33d779e5a20b28 = new NumericRange(list2);
     if ((((uint) num3) + ((uint) layerTotalNeuronCount)) > uint.MaxValue)
     {
         goto Label_01E8;
     }
     this._x232c44e69c86297f = new NumericRange(list);
     if ((((uint) num11) + ((uint) layerTotalNeuronCount)) <= uint.MaxValue)
     {
         goto Label_000B;
     }
     goto Label_01F2;
     Label_00C3:
     // Advance to the next weight level.
     num3++;
     Label_00C9:
     // Outer loop over weight levels (layer pairs).
     if (num3 < (network.LayerCount - 1))
     {
         goto Label_0317;
     }
     this._x0dcd8230e4ec0670 = num;
     goto Label_0057;
     Label_00FF:
     // Bias-weight pass: read weight from the bias neuron (num10) to num11.
     if (num11 < layerNeuronCount)
     {
         num12 = network.GetWeight(num3, num10, num11);
         goto Label_0127;
     }
     goto Label_00C3;
     Label_0115:
     // Record the bias weight into the combined value list.
     values.Add(num12);
     num2++;
     if (0 == 0)
     {
         num11++;
         if (((uint) num9) < 0)
         {
             goto Label_02BF;
         }
         goto Label_00FF;
     }
     goto Label_0184;
     Label_0127:
     if (!network.Structure.ConnectionLimited)
     {
         goto Label_014B;
     }
     Label_0134:
     // Count weights that fall below the connection limit (disabled links).
     if (Math.Abs(num12) < network.Structure.ConnectionLimit)
     {
         num++;
         if ((((uint) num8) - ((uint) num12)) >= 0)
         {
             goto Label_0167;
         }
         goto Label_000B;
     }
     Label_014B:
     list.Add(num12);
     if ((((uint) num7) & 0) == 0)
     {
         goto Label_0115;
     }
     Label_0167:
     if ((((uint) num2) & 0) == 0)
     {
         goto Label_014B;
     }
     goto Label_0127;
     Label_0184:
     if (4 != 0)
     {
         goto Label_00C3;
     }
     goto Label_0057;
     Label_01E8:
     // Begin the bias-weight pass for this level.
     num11 = 0;
     goto Label_00FF;
     Label_01F2:
     num8++;
     Label_01F8:
     // Inner loop over target neurons for the regular-weight pass.
     if (num8 < layerNeuronCount)
     {
         goto Label_02BF;
     }
     num7++;
     Label_0207:
     // Loop over source neurons; afterwards num10 = num4 selects the bias row.
     if (num7 < num4)
     {
         num8 = 0;
         goto Label_01F8;
     }
     if (((((uint) num8) | 1) != 0) && (num4 == layerTotalNeuronCount))
     {
         goto Label_0184;
     }
     num10 = num4;
     goto Label_01E8;
     Label_02BF:
     // Read a regular weight and record it (with connection-limit counting).
     num9 = network.GetWeight(num3, num7, num8);
     if (0 == 0)
     {
         if (network.Structure.ConnectionLimited && (((((uint) num10) + ((uint) num)) < 0) || (Math.Abs(num9) < network.Structure.ConnectionLimit)))
         {
             num++;
         }
         list2.Add(num9);
     }
     values.Add(num9);
     num2++;
     goto Label_01F2;
     Label_0317:
     // Per-level setup: cache the layer sizes for this weight level.
     num4 = network.GetLayerNeuronCount(num3);
     layerTotalNeuronCount = network.GetLayerTotalNeuronCount(num3);
     if (((uint) num7) > uint.MaxValue)
     {
         goto Label_0134;
     }
     layerNeuronCount = network.GetLayerNeuronCount(num3 + 1);
     num7 = 0;
     if ((((uint) layerTotalNeuronCount) + ((uint) layerTotalNeuronCount)) >= 0)
     {
     }
     goto Label_0207;
 }
        /// <summary>
        /// Randomize one weight level of a neural network, Nguyen-Widrow
        /// style: each target neuron's incoming weight vector is rescaled so
        /// its Euclidean length becomes beta.
        /// </summary>
        ///
        /// <param name="network">The network to randomize</param>
        /// <param name="fromLayer">The from level to randomize.</param>
        public override void Randomize(BasicNetwork network, int fromLayer)
        {
            int fromCount = network.GetLayerTotalNeuronCount(fromLayer);
            int toCount = network.GetLayerNeuronCount(fromLayer + 1);

            for (int toNeuron = 0; toNeuron < toCount; toNeuron++)
            {
                // Euclidean norm of the incoming weight vector.
                double n = 0.0;
                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    double w = network.GetWeight(fromLayer, fromNeuron, toNeuron);
                    n += w * w;
                }
                n = Math.Sqrt(n);

                // Fix: an all-zero incoming weight vector would otherwise
                // divide by zero and write NaN into every weight. Skip it —
                // scaling a zero vector is a no-op anyway.
                if (n == 0.0)
                {
                    continue;
                }

                for (int fromNeuron = 0; fromNeuron < fromCount; fromNeuron++)
                {
                    double w = network.GetWeight(fromLayer, fromNeuron, toNeuron);
                    network.SetWeight(fromLayer, fromNeuron, toNeuron, _beta * w / n);
                }
            }
        }
        /// <summary>
        /// Randomize one weight level of a neural network by passing every
        /// weight of the level through Randomize(double) and writing the
        /// result back.
        /// </summary>
        ///
        /// <param name="network">The network to randomize</param>
        /// <param name="fromLayer">The from level to randomize.</param>
        public virtual void Randomize(BasicNetwork network, int fromLayer)
        {
            int sourceCount = network.GetLayerTotalNeuronCount(fromLayer);
            int targetCount = network.GetLayerNeuronCount(fromLayer + 1);

            // Iteration order (source-major) is significant: it fixes the
            // sequence in which the underlying RNG is consumed.
            for (int source = 0; source < sourceCount; source++)
            {
                for (int target = 0; target < targetCount; target++)
                {
                    double weight = network.GetWeight(fromLayer, source, target);
                    network.SetWeight(fromLayer, source, target, Randomize(weight));
                }
            }
        }
Example #5
0
 /// <summary>
 /// Randomize one weight level of a neural network.
 /// NOTE(review): decompiler output with reconstructed goto control flow.
 /// The net effect appears to be: for every (fromNeuron, toNeuron) pair of
 /// this level, read the weight, pass it through Randomize(double), and
 /// write it back — TODO confirm against the original source.
 /// </summary>
 /// <param name="network">The network to randomize.</param>
 /// <param name="fromLayer">The source layer of the level to randomize.</param>
 public virtual void Randomize(BasicNetwork network, int fromLayer)
 {
     int num4;     // to-neuron index
     double num5;  // current weight value
     int layerTotalNeuronCount = network.GetLayerTotalNeuronCount(fromLayer);
     int layerNeuronCount = network.GetLayerNeuronCount(fromLayer + 1);
     int fromNeuron = 0;
     goto Label_002C;
     Label_000D:
     // Advance to the next from-neuron. The unsigned comparisons below are
     // obfuscator noise (always false / always true).
     fromNeuron++;
     if ((((uint) fromNeuron) + ((uint) fromNeuron)) > uint.MaxValue)
     {
         goto Label_004B;
     }
     if (0 != 0)
     {
         goto Label_003C;
     }
     Label_002C:
     // Outer loop condition over source neurons (including bias).
     if (fromNeuron < layerTotalNeuronCount)
     {
         goto Label_0067;
     }
     return;
     Label_003C:
     // Write back the randomized weight, move to the next to-neuron.
     network.SetWeight(fromLayer, fromNeuron, num4, num5);
     num4++;
     Label_004B:
     // Inner loop over target neurons.
     if (num4 < layerNeuronCount)
     {
         num5 = network.GetWeight(fromLayer, fromNeuron, num4);
         if (((uint) num5) >= 0)
         {
             num5 = this.Randomize(num5);
             goto Label_003C;
         }
         goto Label_000D;
     }
     if ((((uint) fromLayer) + ((uint) fromLayer)) >= 0)
     {
         goto Label_000D;
     }
     Label_0067:
     // Start the inner to-neuron loop for this from-neuron.
     num4 = 0;
     goto Label_004B;
 }
Example #6
0
 /// <summary>
 /// Randomize one weight level of a neural network.
 /// NOTE(review): decompiler output with reconstructed goto control flow.
 /// The net effect appears to be Nguyen-Widrow style normalization: for each
 /// target neuron, compute the Euclidean norm of its incoming weights, then
 /// rescale each of those weights by beta / norm — TODO confirm against the
 /// original source.
 /// </summary>
 /// <param name="network">The network to randomize.</param>
 /// <param name="fromLayer">The source layer of the level to randomize.</param>
 public override void Randomize(BasicNetwork network, int fromLayer)
 {
     int num2;     // neuron count of the next layer
     int num3;     // to-neuron index
     double num4;  // accumulated squared norm, then the norm itself
     int num5;     // from-neuron index during the norm pass
     double num6;  // weight read during the norm pass
     int num7;     // from-neuron index during the rescale pass
     double num8;  // weight read/written during the rescale pass
     int layerTotalNeuronCount = network.GetLayerTotalNeuronCount(fromLayer);
     goto Label_00DF;
     Label_0011:
     // Start processing the next target neuron, or return when done.
     if (num3 < num2)
     {
         num4 = 0.0;
         num5 = 0;
     }
     else if ((((uint) num8) - ((uint) layerTotalNeuronCount)) >= 0)
     {
         return;
     }
     // Norm pass: accumulate the squared incoming weights of target num3.
     while (true)
     {
         if (num5 >= layerTotalNeuronCount)
         {
             num4 = Math.Sqrt(num4);
             num7 = 0;
             if ((((uint) num4) + ((uint) num2)) < 0)
             {
                 break;
             }
             goto Label_0065;
         }
         num6 = network.GetWeight(fromLayer, num5, num3);
         num4 += num6 * num6;
         num5++;
     }
     Label_0044:
     // Obfuscator noise; the condition is always false.
     if ((((uint) fromLayer) + ((uint) num6)) > uint.MaxValue)
     {
         goto Label_00DF;
     }
     num7++;
     Label_0065:
     // Rescale pass over the same incoming weights.
     if (num7 < layerTotalNeuronCount)
     {
         num8 = network.GetWeight(fromLayer, num7, num3);
     }
     else
     {
         num3++;
         goto Label_0011;
     }
     Label_009C:
     // _xd7d571ecee49d1e4 is presumably the beta scaling factor.
     num8 = (this._xd7d571ecee49d1e4 * num8) / num4;
     network.SetWeight(fromLayer, num7, num3, num8);
     goto Label_0044;
     Label_00DF:
     num2 = network.GetLayerNeuronCount(fromLayer + 1);
     if (((uint) num8) > uint.MaxValue)
     {
         goto Label_009C;
     }
     num3 = 0;
     goto Label_0011;
 }
        /// <summary>
        /// Train a stacked autoencoder layer by layer, then build and
        /// fine-tune the final network (encoder layers widened by the
        /// segment-neighbour inputs). Returns one per-epoch error array per
        /// trained stage.
        /// NOTE(review): this method mutates the incoming <paramref name="data"/>
        /// rows in place (the segment-neighbour columns are stripped) —
        /// callers must not rely on data being unchanged.
        /// </summary>
        /// <param name="data">Training inputs; the last SEGMENT_NEIGHBOURS columns are split off.</param>
        /// <param name="ideal">Expected outputs for the final supervised stage.</param>
        /// <returns>List of per-stage training-error arrays.</returns>
        public List<double[]> Learn(double[][] data, double[][] ideal)
        {
            // Shallow clone: origData keeps references to the original full
            // rows. Since data[i] is replaced (not mutated) below, origData
            // retains the neighbour columns for the final supervised stage.
            double[][] origData = (double[][])data.Clone();
            int n = data.Length;
            int m = data[0].Length;
            double[][] output = new double[n][];
            double[][] sgmNeighbours = new double[n][];
            // Split each row into the main features (data) and the trailing
            // segment-neighbour features (sgmNeighbours).
            // NOTE(review): sgmNeighbours is filled but never read again in
            // this method.
            for (var i = 0; i < n; i++)
            {
                double[] sgmN = new double[SegmentationData.SEGMENT_NEIGHBOURS];
                Array.Copy(data[i], m - SegmentationData.SEGMENT_NEIGHBOURS, sgmN, 0, SegmentationData.SEGMENT_NEIGHBOURS);
                sgmNeighbours[i] = sgmN;
                data[i] = data[i].Take(m - SegmentationData.SEGMENT_NEIGHBOURS).ToArray();
                output[i] = new double[m - SegmentationData.SEGMENT_NEIGHBOURS];
                data[i].CopyTo(output[i], 0);
            }

            // Autoencoder training set: input == output (identity target).
            IMLDataSet trainingSet = new BasicMLDataSet(data, output);

            // First autoencoder: input layer -> first hidden layer -> input-size reconstruction.
            int inputLayerSize = layersConfiguration[0] - SegmentationData.SEGMENT_NEIGHBOURS;
            int trainingLayerSize = layersConfiguration[1];
            BasicNetwork oneLayerAutoencoder = new BasicNetwork();
            oneLayerAutoencoder.AddLayer(new BasicLayer(null, BIAS, inputLayerSize));
            oneLayerAutoencoder.AddLayer(new BasicLayer(CurrentActivationFunction(), BIAS, trainingLayerSize));
            oneLayerAutoencoder.AddLayer(new BasicLayer(CurrentActivationFunction(), false, inputLayerSize));
            oneLayerAutoencoder.Structure.FinalizeStructure();
            oneLayerAutoencoder.Reset();

            IMLTrain train = new ResilientPropagation(oneLayerAutoencoder, trainingSet);
            //IMLTrain train = new Backpropagation(oneLayerAutoencoder, trainingSet, LEARNING_RATE, MOMENTUM);

            int epoch = 1;
            List<double[]> errors = new List<double[]>();
            double[] trainError = new double[AUTOENCODER_MAX_ITER];

            // Train until the error drops below EPS or the iteration cap is hit.
            do
            {
                train.Iteration();
                ActiveForm.Text = @"Epoch #" + epoch + @" Error:" + train.Error;
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                trainError[epoch - 1] = train.Error;
                epoch++;
                //errors.Add(train.Error);
            } while (train.Error > EPS && epoch < AUTOENCODER_MAX_ITER);
            errors.Add(trainError);
            train.FinishTraining();

            // Encoder = the first (input -> hidden) half of the autoencoder.
            BasicNetwork encoder = new BasicNetwork();
            encoder.AddLayer(new BasicLayer(null, BIAS, oneLayerAutoencoder.GetLayerNeuronCount(0)));
            encoder.AddLayer(new BasicLayer(CurrentActivationFunction(), false, oneLayerAutoencoder.GetLayerNeuronCount(1)));
            encoder.Structure.FinalizeStructure();
            encoder.Reset();

            // Copy the trained input->hidden weights into the encoder.
            for (int i = 0; i < encoder.LayerCount - 1; i++)
                for (int f = 0; f < encoder.GetLayerNeuronCount(i); f++)
                    for (int t = 0; t < encoder.GetLayerNeuronCount(i + 1); t++)
                        encoder.SetWeight(i, f, t, oneLayerAutoencoder.GetWeight(i, f, t));

            //Compare2Networks(oneLayerAutoencoder, encoder);

            // Greedy layer-wise training of the remaining hidden layers.
            // NOTE(review): the bound Count - 2 skips the last configured
            // layer (the supervised output) — confirm this is intended.
            for(int l=1; l<layersConfiguration.Count -2; l++)
            {
                inputLayerSize = layersConfiguration[l];
                trainingLayerSize = layersConfiguration[l+1];
                oneLayerAutoencoder = new BasicNetwork();
                oneLayerAutoencoder.AddLayer(new BasicLayer(null, BIAS, inputLayerSize));
                oneLayerAutoencoder.AddLayer(new BasicLayer(CurrentActivationFunction(), BIAS, trainingLayerSize));
                oneLayerAutoencoder.AddLayer(new BasicLayer(CurrentActivationFunction(), false, inputLayerSize));
                oneLayerAutoencoder.Structure.FinalizeStructure();
                oneLayerAutoencoder.Reset();

                // Compute the output of the encoder built so far; it becomes
                // the training input (and identity target) for this stage.
                double[][] input = new double[n][];
                double[][] newOutput = new double[n][];
                for(int ni = 0; ni <n; ni++)
                {
                    IMLData res = encoder.Compute(new BasicMLData(data[ni]));
                    double[] resD = new double[res.Count];
                    for(int i=0; i<res.Count; i++)
                        resD[i] = res[i];
                    input[ni] = resD;
                    newOutput[ni] = new double[res.Count];
                    input[ni].CopyTo(newOutput[ni], 0);
                }

                BasicMLDataSet newTrainingSet = new BasicMLDataSet(input, newOutput);
                train = new ResilientPropagation(oneLayerAutoencoder, newTrainingSet);
                //train = new Backpropagation(oneLayerAutoencoder, newTrainingSet, LEARNING_RATE, MOMENTUM);

                epoch = 1;
                trainError = new double[AUTOENCODER_MAX_ITER];
                do
                {
                    train.Iteration();
                    ActiveForm.Text = @"Epoch #" + epoch + @" Error:" + train.Error;
                    Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                    trainError[epoch - 1] = train.Error;
                    epoch++;
                } while (train.Error > EPS && epoch < AUTOENCODER_MAX_ITER);
                errors.Add(trainError);
                train.FinishTraining();

                // Extend the encoder with the newly trained hidden layer.
                BasicNetwork extendedEncoder = new BasicNetwork();
                extendedEncoder.AddLayer(new BasicLayer(null, BIAS, encoder.GetLayerNeuronCount(0)));
                for (int el = 1; el < encoder.LayerCount; el++ )
                    extendedEncoder.AddLayer(new BasicLayer(CurrentActivationFunction(), BIAS, encoder.GetLayerNeuronCount(el)));
                extendedEncoder.AddLayer(new BasicLayer(CurrentActivationFunction(), false, oneLayerAutoencoder.GetLayerNeuronCount(1)));
                extendedEncoder.Structure.FinalizeStructure();

                // Copy weights into the extended encoder: existing encoder
                // levels first, then the new autoencoder's first level.
                for (int i = 0; i < extendedEncoder.LayerCount - 1; i++)
                {
                    if (i < encoder.LayerCount-1)
                    {
                        for (int f = 0; f < extendedEncoder.GetLayerNeuronCount(i); f++)
                            for (int t = 0; t < extendedEncoder.GetLayerNeuronCount(i + 1); t++)
                                extendedEncoder.SetWeight(i, f, t, encoder.GetWeight(i, f, t));
                    }
                    else
                    {
                        for (int f = 0; f < extendedEncoder.GetLayerNeuronCount(i); f++)
                            for (int t = 0; t < extendedEncoder.GetLayerNeuronCount(i + 1); t++)
                                extendedEncoder.SetWeight(i, f, t, oneLayerAutoencoder.GetWeight(0, f, t));
                    }
                }
                encoder = extendedEncoder;

            }

            // Build the structure of the final network: every encoder layer
            // is widened by the segment-neighbour inputs, plus the configured
            // output layer.
            network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, BIAS, encoder.GetLayerNeuronCount(0) + SegmentationData.SEGMENT_NEIGHBOURS));
            for (int el = 1; el < encoder.LayerCount; el++)
                network.AddLayer(new BasicLayer(CurrentActivationFunction(), BIAS, encoder.GetLayerNeuronCount(el) + SegmentationData.SEGMENT_NEIGHBOURS));
            network.AddLayer(new BasicLayer(CurrentActivationFunction(), false, layersConfiguration[layersConfiguration.Count - 1]));
            network.Structure.FinalizeStructure();
            network.Reset();

            /*
            for (int i = 0; i < encoder.LayerCount - 1; i++)
                for (int f = 0; f < encoder.GetLayerNeuronCount(i); f++)
                    for (int t = 0; t < encoder.GetLayerNeuronCount(i + 1); t++)
                            network.SetWeight(i, f, t, encoder.GetWeight(i, f, t));
            */
            // Seed the final network: cross-partition weights get 0, the
            // neighbour pass-through weights get 1, encoder-to-encoder
            // weights are copied from the pre-trained encoder.

            for (int i = 0; i < encoder.LayerCount - 1; i++)
                for (int f = 0; f < network.GetLayerNeuronCount(i); f++)
                    for (int t = 0; t < network.GetLayerNeuronCount(i + 1); t++)
                    {
                        if (f < encoder.GetLayerNeuronCount(i) && t >= encoder.GetLayerNeuronCount(i + 1))
                            network.SetWeight(i, f, t, 0);
                        else if (f >= encoder.GetLayerNeuronCount(i) && t < encoder.GetLayerNeuronCount(i + 1))
                            network.SetWeight(i, f, t, 0);
                        else if (f >= encoder.GetLayerNeuronCount(i) && t >= encoder.GetLayerNeuronCount(i + 1))
                            network.SetWeight(i, f, t, 1);
                        else
                            network.SetWeight(i, f, t, encoder.GetWeight(i, f, t));
                    }

            // Supervised fine-tuning of the final network on the original
            // (full, neighbour-including) rows.
            trainingSet = new BasicMLDataSet(origData, ideal);

            train = new ResilientPropagation(network, trainingSet);
            //train = new Backpropagation(network, trainingSet, LEARNING_RATE, MOMENTUM);

            epoch = 1;
            trainError = new double[FINAL_NETWORK_MAX_ITER];
            do
            {
                train.Iteration();
                ActiveForm.Text = @"Epoch #" + epoch + @" Error:" + train.Error;
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                trainError[epoch - 1] = train.Error;
                epoch++;

            } while (train.Error > EPS && epoch < FINAL_NETWORK_MAX_ITER);
            errors.Add(trainError);
            train.FinishTraining();

            // Persist the trained network; failure only shows a message box.
            try
            {
                string networkFileName = "autoencoder wo cmp 300 125 50 3";
                EncogDirectoryPersistence.SaveObject(new FileInfo(networkFileName), network);
                MessageBox.Show("NETWORK SAVED TO FILE " + networkFileName);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }

            return errors;
        }
 /// <summary>
 /// Show a message box comparing the weights of two networks, layer by
 /// layer, neuron by neuron.
 /// </summary>
 /// <param name="n1">The first network.</param>
 /// <param name="n2">The second network.</param>
 private void Compare2Networks(BasicNetwork n1, BasicNetwork n2)
 {
     // Fix: the original built the report with O(n^2) string concatenation
     // in triple-nested loops and duplicated the dump code for both
     // networks; use a StringBuilder and a shared helper instead.
     var report = new System.Text.StringBuilder();
     AppendNetworkWeights(report, n1);
     report.Append("---------------------------------------\n\n");
     AppendNetworkWeights(report, n2);
     MessageBox.Show(report.ToString());
 }

 /// <summary>
 /// Append a per-layer, per-neuron dump of all weights of the given
 /// network to the builder.
 /// </summary>
 /// <param name="sb">The builder receiving the dump.</param>
 /// <param name="net">The network whose weights are dumped.</param>
 private void AppendNetworkWeights(System.Text.StringBuilder sb, BasicNetwork net)
 {
     for (int i = 0; i < net.LayerCount - 1; i++)
     {
         sb.Append("Layer: ").Append(i).Append(": \n");
         for (int f = 0; f < net.GetLayerNeuronCount(i); f++)
         {
             sb.Append("Neuron: ").Append(f).Append("\n");
             for (int t = 0; t < net.GetLayerNeuronCount(i + 1); t++)
             {
                 sb.Append(net.GetWeight(i, f, t)).Append(", ");
             }
             sb.Append("\n");
         }
         sb.Append("\n");
     }
 }
Example #9
0
        /// <summary>
        /// Push the weight labels from the neural network onto the outgoing
        /// links of the diagram nodes. The new labels must already be present
        /// in the neural network neuralNet.
        /// </summary>
        /// <param name="nodes">Diagram nodes, one list per network layer.</param>
        /// <param name="neuralNet">The network supplying the weights.</param>
        private void ReLabelLinks(List<ShapeNode>[] nodes, BasicNetwork neuralNet)
        {
            // Layers are indexed from the input end (highest Flat index)
            // down to the output layer.
            for (int layer = neuralNet.LayerCount - 1; layer > 0; layer--)
            {
                int fromCount = neuralNet.Flat.LayerCounts[layer];
                int toCount = neuralNet.Flat.LayerCounts[layer - 1];
                for (int x = 0; x < fromCount; x++)
                {
                    for (int y = 0; y < toCount; y++)
                    {
                        try
                        {
                            string label = neuralNet
                                .GetWeight(neuralNet.LayerCount - layer - 1, x, y)
                                .ToString("F4");
                            nodes[layer][x].OutgoingLinks[y].Text = label;
                        }
                        catch
                        {
                            // Node/link counts can disagree with the network
                            // (e.g. bias nodes); leave that link text unchanged.
                        }
                    }
                }
            }
        }
Example #10
0
        /// <summary>
        /// Draw the neural network together with the selected training pair:
        /// one column of nodes per layer, weight-labelled links between
        /// layers, and the input-form values attached to the input layer.
        /// </summary>
        /// <param name="diagram">The diagram to draw into (cleared first).</param>
        /// <param name="nodes">Receives the created nodes, one list per layer.</param>
        /// <param name="neuralNet">The network to visualize.</param>
        private void DrawNeuralNetPair(Diagram diagram, out List<ShapeNode>[] nodes, BasicNetwork neuralNet)
        {
            if (diagram.Items.Count > 0)
                diagram.ClearAll();

            ProcessPair pp = cbProcessPair.SelectedItem as ProcessPair;
            TemplXML.FormData form = ConvertDataArrayToXml(templ, trainingData[pp.Pair].InputArray);

            // Layout constants.
            double dx = 400;        // horizontal distance between layers
            double rastNode = 30;   // vertical gap between nodes
            double startX = dx + 50; // + (neuralNet.LayerCount - 1) * 300;
            double startY = 50;
            double diam = 30;       // node diameter
            double startYMax = startY;
            nodes = new List<ShapeNode>[neuralNet.LayerCount];
            string label = "null";
            graphPairLinkInput = new List<DiagramLink>(neuralNet.Flat.LayerCounts[neuralNet.LayerCount - 1]);
            // Layers are drawn from the input end (highest Flat index) to the output.
            for (int i = neuralNet.LayerCount - 1; i >= 0; i--)
            {
                List<ShapeNode> curN = new List<ShapeNode>();
                for (int j = 0; j < neuralNet.Flat.LayerCounts[i]; j++)
                {
                    ShapeNode tmp = DiagramHelper.CreateNode(diagram, startX, startY, diam, diam, j.ToString());
                    tmp.MouseLeftButtonDown += nodeSelected_MouseLeftButtonDown;
                    curN.Add(tmp);

                    // For input-layer neurons, attach a box with the form value.
                    if (i == neuralNet.LayerCount - 1 && j < neuralNet.InputCount)
                    {
                        ShapeNode q = DiagramHelper.CreateNode(Shapes.Rectangle, diagram, startX - dx - 200, startY - 10, 200, 50,  form.Values[j].Field.Title);
                        if (form.Values[j] is TemplXML.FormDataValueNumber)
                        {
                            var tmpF = form.Values[j] as TemplXML.FormDataValueNumber;
                            label = string.Format("{0}", tmpF.Value);
                        }
                        else
                        {
                            var tmpF = form.Values[j] as TemplXML.FormDataValueSelect;
                            label = tmpF.Value.Title;
                        }
                        var tmpLink = DiagramHelper.CreateLink(diagram, q, tmp, label);
                        tmpLink.MouseLeftButtonDown += linkSelect_MouseLeftButtonDown;
                        graphPairLinkInput.Add(tmpLink);
                    }

                    startY += diam + rastNode;
                }
                nodes[i] = curN;
                startX += dx;
                if (startYMax < startY) startYMax = startY;
                // Vertically center the next (smaller) layer.
                if (i != 0)
                    startY = startYMax / 2 - neuralNet.Flat.LayerCounts[i - 1] * (rastNode + diam) / 2;
            }

            // Create the weight-labelled links between adjacent layers.
            string tmpWeigth;
            int countN;
            for (int i = neuralNet.LayerCount - 1; i > 0; i--)
            {
                // Fix: was "countN = countN = ..." (duplicated assignment).
                countN = neuralNet.Flat.LayerCounts[i - 1];
                // The bias node of the next layer has no incoming links.
                if (i - 1 == neuralNet.LayerCount - 2 && neuralNet.GetLayerBiasActivation(i - 1) > 0)
                    countN -= 1;
                for (int x = 0; x < neuralNet.Flat.LayerCounts[i]; x++)
                {
                    for (int y = 0; y < countN; y++)
                    {
                        tmpWeigth = neuralNet.GetWeight(neuralNet.LayerCount - i - 1, x, y).ToString("F4");
                        var link = DiagramHelper.CreateLink(diagram, nodes[i][x], nodes[i - 1][y], tmpWeigth);
                        link.MouseLeftButtonDown += linkSelect_MouseLeftButtonDown;
                    }
                }
            }
        }
Example #11
0
        /// <summary>
        /// Draw the neural network: one column of nodes per layer,
        /// weight-labelled links between layers, and the form-field titles
        /// attached to the input layer.
        /// </summary>
        /// <param name="diagram">The diagram to draw into (cleared first).</param>
        /// <param name="nodes">Receives the created nodes, one list per layer.</param>
        /// <param name="neuralNet">The network to visualize.</param>
        private void DrawNeuralNet(Diagram diagram, out List<ShapeNode>[] nodes, BasicNetwork neuralNet)
        {
            if (diagram.Items.Count > 0)
                diagram.ClearAll();
            // Load the form template belonging to the selected training log.
            InitEvent netInit = cbTrainsLog.SelectedItem as InitEvent;
            string pathXML = netInit.Path.Replace(".np4", ".xml");
            // NOTE(review): execution continues even when the template file
            // is missing — the load below will then throw.
            if (!File.Exists(pathXML))
                MessageBox.Show("Форма не найдена");
            templ = TemplXML.FormTemplate.FromXml(XElement.Load(pathXML));

            // Layout constants.
            double dx = 400;        // horizontal distance between layers
            double rastNode = 30;   // vertical gap between nodes
            double startX = dx + 50; // + (neuralNet.LayerCount - 1) * 300;
            double startY = 50;
            double diam = 30;       // node diameter
            double startYMax = startY;
            nodes = new List<ShapeNode>[neuralNet.LayerCount];
            // Layers are drawn from the input end (highest Flat index) to the output.
            for (int i = neuralNet.LayerCount - 1; i >= 0; i--)
            {
                List<ShapeNode> curN = new List<ShapeNode>();
                for (int j = 0; j < neuralNet.Flat.LayerCounts[i]; j++)
                {
                    ShapeNode tmp = DiagramHelper.CreateNode(diagram, startX, startY, diam, diam, j.ToString());
                    tmp.MouseLeftButtonDown += nodeSelected_MouseLeftButtonDown;
                    curN.Add(tmp);

                    // For input-layer neurons, attach a box with the field title.
                    if (i == neuralNet.LayerCount - 1 && j < neuralNet.InputCount)
                    {
                        ShapeNode q = DiagramHelper.CreateNode(Shapes.Rectangle, diagram, startX - dx, startY, 200, 50, templ.Fields[j].Title);
                        DiagramLink link = DiagramHelper.CreateLink(diagram, q, tmp);
                        link.MouseLeftButtonDown += linkSelect_MouseLeftButtonDown;
                    }

                    startY += diam + rastNode;
                }
                nodes[i] = curN;
                startX += dx;
                if (startYMax < startY) startYMax = startY;
                // Vertically center the next (smaller) layer.
                if (i != 0)
                    startY = startYMax / 2 - neuralNet.Flat.LayerCounts[i - 1] * (rastNode + diam) / 2;
            }

            // Create the weight-labelled links between adjacent layers.
            string tmpWeigth;
            int countN;
            for (int i = neuralNet.LayerCount - 1; i > 0; i--)
            {
                // Fix: was "countN = countN = ..." (duplicated assignment).
                countN = neuralNet.Flat.LayerCounts[i - 1];
                // The bias node of the next layer has no incoming links.
                if (i - 1 == neuralNet.LayerCount - 2 && neuralNet.GetLayerBiasActivation(i - 1) > 0)
                    countN -= 1;
                for (int x = 0; x < neuralNet.Flat.LayerCounts[i]; x++)
                {
                    for (int y = 0; y < countN; y++)
                    {
                        tmpWeigth = neuralNet.GetWeight(neuralNet.LayerCount - i - 1, x, y).ToString("F4");
                        var link = DiagramHelper.CreateLink(diagram, nodes[i][x], nodes[i - 1][y], tmpWeigth);
                        link.MouseLeftButtonDown += linkSelect_MouseLeftButtonDown;
                    }
                }
            }
        }