Code Example #1
        // this method was apparently used earlier to determine the kernel weight coefficient depending on the number of connections
        // it was used in the connectMaps method above
        private double getWeightCoeficient(FeatureMapLayer toMap)
        {
            int    numberOfInputConnections = toMap.getNeuronAt(0, 0).InputConnections.Count;
            double coefficient = 1d / Math.Sqrt(numberOfInputConnections);

            // fall back to 1 if the coefficient is unusable (infinite, NaN or zero)
            coefficient = double.IsInfinity(coefficient) || double.IsNaN(coefficient) || coefficient == 0 ? 1 : coefficient;
            return coefficient;
        }
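
For context: the coefficient above is the usual 1/sqrt(fan-in) scaling heuristic, which shrinks the weight initialization range as the number of input connections grows. The following standalone sketch is not part of the library code; the class, the Main method and the +/-0.15 base range are illustrative assumptions. It only shows how the fallback check behaves for a few fan-in values:

    using System;

    // Standalone sketch, not library code: the 1/sqrt(fan-in) coefficient with a
    // fallback to 1 when the value is unusable (zero fan-in gives Infinity).
    class WeightCoefficientSketch
    {
        static double GetWeightCoefficient(int numberOfInputConnections)
        {
            double coefficient = 1d / Math.Sqrt(numberOfInputConnections);
            return double.IsInfinity(coefficient) || double.IsNaN(coefficient) || coefficient == 0 ? 1 : coefficient;
        }

        static void Main()
        {
            foreach (int fanIn in new[] { 0, 1, 25, 100 })
            {
                double c = GetWeightCoefficient(fanIn);
                // scale an assumed base init range of +/-0.15 by the coefficient
                Console.WriteLine($"fan-in = {fanIn,3}: coefficient = {c:F3}, scaled range = +/-{0.15 * c:F4}");
            }
        }
    }
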
Code Example #2
        /// <summary>
        /// Adds a feature map (2d layer) to this feature map layer </summary>
        /// <param name="featureMap"> feature map to add </param>
        public virtual void addFeatureMap(FeatureMapLayer featureMap)
        {
            if (featureMap == null)
            {
                throw new System.ArgumentException("FeatureMap cannot be null!");
            }

            featureMaps.Add(featureMap);
            neurons.AddRange(featureMap.Neurons);
        }
Code Example #3
        /// <summary>
        /// Creates connections with shared weights between two feature maps.
        /// Assumes that toMap belongs to a convolutional layer.
        /// <p/>
        /// The kernel is used as a sliding window, and kernel positions overlap.
        /// The kernel shifts right by one position at a time. Neurons at the same
        /// kernel position share the same weights.
        /// </summary>
        /// <param name="fromMap"> source feature map </param>
        /// <param name="toMap"> destination feature map </param>
        public override void connectMaps(FeatureMapLayer fromMap, FeatureMapLayer toMap)
        {
            Kernel kernel = toMap.Kernel;

            kernel.initWeights(-0.15, 0.15); // why these values ???
                                             //  int numberOfSharedWeights = kernel.getArea();
                                             //        Weight[,] weights = new Weight[kernel.getHeight(),kernel.getWidth()];
                                             //        //double coefficient = getWeightCoeficient(toMap);
                                             //        // initialize kernel with random weights
                                             //        // move this into the kernel
                                             //        for (int i = 0; i < kernel.getHeight(); i++) {
                                             //            for (int j = 0; j < kernel.getWidth(); j++) {
                                             //                Weight weight = new Weight();
                                             //                weight.randomize(-0.15, 0.15); // why these values?
                                             //                weights[i,j] = weight;
                                             //            }
                                             //        }
                                             //        kernel.setWeights(weights); // in the end all kernels of all feature maps have the same weights, because they reference the same kernel instance from the superclass!!!!
                                             //                                    // move the kernel into Layer2D, rename it to FeatureMapLayer and give it a kernel...
                                             //                                    // besides the kernel, also give it a BiasNeuron...
            BiasNeuron biasNeuron = new BiasNeuron();

            fromMap.addNeuron(biasNeuron);


            // this is used only for connecting two convolutional layers !!!
            // add a step (stride) for 'from' - it does not have to be just 1
            // open question: how is this applied at the edge - it should go from the center - add that border (padding) on the sides!!!!
            for (int y = 0; y < toMap.Height; y++)                // iterate all neurons by height in toMap  -- the order of x and y should probably be swapped here as well!!!
            {
                for (int x = 0; x < toMap.Width; x++)             // iterate all neurons by width in toMap
                {
                    Neuron toNeuron = toMap.getNeuronAt(x, y);    // get neuron at specified position in toMap
                    for (int ky = 0; ky < kernel.Height; ky++)    // iterate kernel positions by y
                    {
                        for (int kx = 0; kx < kernel.Width; kx++) // iterate kernel positions by x
                        {
                            int fromX = x + kx;                   // calculate the x position of from neuron
                            int fromY = y + ky;                   // calculate the y position of from neuron
                                                                  //int currentWeightIndex = kx + ky * kernel.getHeight(); // find the idx of the shared weight
                            Weight[,] concreteKernel = kernel.Weights;
                            Neuron fromNeuron = fromMap.getNeuronAt(fromX, fromY);
                            ConnectionFactory.createConnection(fromNeuron, toNeuron, concreteKernel[kx, ky]); // - is this correct ???
                                                                                                              // also create connection from bias
                            ConnectionFactory.createConnection(biasNeuron, toNeuron);
                        }
                    }
                }
            }
        }
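
Because fromMap is read at x + kx and y + ky with a stride of 1 and no padding, the loops above only stay in bounds when the destination map is sized for a "valid" convolution. A minimal sketch of that size relation follows; the helper name and the 28x28 / 5x5 figures are illustrative, not taken from the library:

    // Illustrative helper, not library code: destination map size for a
    // stride-1 "valid" convolution, so that fromX = x + kx never leaves fromMap.
    static int ConvOutputSize(int fromSize, int kernelSize)
    {
        return fromSize - kernelSize + 1;
    }

    // Example: a 28x28 source map with a 5x5 kernel gives a 24x24 destination map,
    // so fromMap.getNeuronAt(fromX, fromY) is only called with indices in [0, 27].
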
Code Example #4
        /// <summary>
        /// Creates connections with shared weights between two feature maps
        /// Assumes that toMap is from Pooling layer.
        /// <p/>
        /// In this implementation, there is no overlapping between kernel positions.
        /// </summary>
        /// <param name="fromMap"> source feature map </param>
        /// <param name="toMap">   destination feature map </param>
        public override void connectMaps(FeatureMapLayer fromMap, FeatureMapLayer toMap)
        {
            int    kernelWidth  = kernel.Width;
            int    kernelHeight = kernel.Height;
            Weight weight       = new Weight(1);

            for (int x = 0; x < fromMap.Width - kernelWidth + 1; x += kernelWidth)             // should the step equal the kernel size?
            {
                for (int y = 0; y < fromMap.Height - kernelHeight + 1; y += kernelHeight)
                {
                    Neuron toNeuron = toMap.getNeuronAt(x / kernelWidth, y / kernelHeight);
                    for (int dy = 0; dy < kernelHeight; dy++)
                    {
                        for (int dx = 0; dx < kernelWidth; dx++)
                        {
                            int    fromX      = x + dx;
                            int    fromY      = y + dy;
                            Neuron fromNeuron = fromMap.getNeuronAt(fromX, fromY);
                            ConnectionFactory.createConnection(fromNeuron, toNeuron, weight);
                        }
                    }
                }
            }
        }
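
Since the step equals the kernel size, kernel positions never overlap: each destination neuron receives exactly kernelWidth * kernelHeight connections, all sharing the single weight initialized to 1, and the destination map is the source map downsampled by the kernel size. A minimal sketch of that relation follows; the helper name and the example figures are illustrative only:

    // Illustrative helper, not library code: destination map size when the pooling
    // kernel slides with a step equal to its own size (non-overlapping positions).
    static int PoolOutputSize(int fromSize, int kernelSize)
    {
        return fromSize / kernelSize; // e.g. a 24x24 map with a 2x2 kernel gives 12x12
    }
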
Code Example #5
 /// <summary>
 /// Creates connections between two feature maps. The connectivity pattern
 /// is defined by subclasses, so the method is left abstract here.
 /// Maybe it should even be removed from this class entirely...
 /// </summary>
 /// <param name="fromMap"> source feature map </param>
 /// <param name="toMap"> destination feature map </param>
 public abstract void connectMaps(FeatureMapLayer fromMap, FeatureMapLayer toMap);
Code Example #6
        /// <summary>
        /// Returns neuron instance at specified (x, y) position at specified feature map layer
        /// </summary>
        /// <param name="x">        neuron's x position </param>
        /// <param name="y">        neuron's y position </param>
        /// <param name="mapIndex"> feature map index </param>
        /// <returns> neuron at specified (x, y, map) position </returns>
        public virtual Neuron getNeuronAt(int x, int y, int mapIndex)
        {
            FeatureMapLayer map = featureMaps[mapIndex];

            return map.getNeuronAt(x, y);
        }
Code Example #7
 public override void connectMaps(FeatureMapLayer fromMap, FeatureMapLayer toMap)
 {
     // intentionally does nothing - this layer type creates no connections between feature maps
 }