Example #1
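 // Convenience overload for a layer defined by its map and receptive-field
 // dimensions; forwards to the full constructor (Example #5) with
 // useMapInfo = true and isFullyMapped = true, using the last layer in
 // network.Layers as the previous layer.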
 public Layer(NeuralNetwork network, LayerType layerType, ActivationFunction activationFunction, int mapCount, int mapWidth, int mapHeight, int receptiveFieldWidth, int receptiveFieldHeight, bool lockedWeights = false) : this(network, network.Layers.Count, layerType, activationFunction, mapCount * mapWidth * mapHeight, true, mapCount, mapWidth, mapHeight, true, receptiveFieldWidth, receptiveFieldHeight, ((network.Layers.Count == 0) ? (null) : (network.Layers[network.Layers.Count - 1])), null, lockedWeights)
 {
 }
Example #2
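 // Convenience overload for a layer with an explicit Mappings table and a
 // 1x1 receptive field (useMapInfo = true, isFullyMapped = false).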
 public Layer(NeuralNetwork network, LayerType layerType, ActivationFunction activationFunction, int mapCount, int mapWidth, int mapHeight, Mappings mappings, bool lockedWeights = false) : this(network, network.Layers.Count, layerType, activationFunction, mapCount * mapWidth * mapHeight, true, mapCount, mapWidth, mapHeight, false, 1, 1, ((network.Layers.Count == 0) ? (null) : (network.Layers[network.Layers.Count - 1])), mappings, lockedWeights)
 {
 }
Example #3
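 // Convenience overload for layers without an explicit receptive field
 // (e.g. the input layer): defaults to Tanh activation, useMapInfo = true,
 // isFullyMapped = true and a 0x0 receptive field.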
 public Layer(NeuralNetwork network, LayerType layerType, int mapCount, int mapWidth, int mapHeight, bool lockedWeights = false) : this(network, network.Layers.Count, layerType, ActivationFunction.Tanh, mapCount * mapWidth * mapHeight, true, mapCount, mapWidth, mapHeight, true, 0, 0, ((network.Layers.Count == 0) ? (null) : (network.Layers[network.Layers.Count - 1])), null, lockedWeights)
 {
 }
Example #4
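 // Convenience overload for layers defined by a plain neuron count, such as
 // fully connected or RBF layers (useMapInfo = false).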
 public Layer(NeuralNetwork network, LayerType layerType, ActivationFunction activationFunction, int neuronCount, Mappings mappings, bool lockedWeights = false) : this(network, network.Layers.Count, layerType, activationFunction, neuronCount, false, 1, 1, 1, false, 0, 0, ((network.Layers.Count == 0) ? (null) : (network.Layers[network.Layers.Count - 1])), mappings, lockedWeights)
 {
 }
Example #5
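        // The designated constructor: stores the layer parameters, allocates the
        // neurons and the shared weights, and wires up the connections according
        // to the layer type.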
        public Layer(NeuralNetwork network, int layerIndex, LayerType layerType, ActivationFunction activationFunction, int neuronCount, bool useMapInfo, int mapCount, int mapWidth, int mapHeight, bool isFullyMapped, int receptiveFieldWidth, int receptiveFieldHeight, Layer previousLayer, Mappings mappings, bool lockedWeights = false)
        {
            Network              = network;
            LayerIndex           = layerIndex;
            LayerType            = layerType;
            ActivationFunction   = activationFunction;
            NeuronCount          = neuronCount;
            UseMapInfo           = useMapInfo;
            MapCount             = mapCount;
            MapWidth             = mapWidth;
            MapHeight            = mapHeight;
            IsFullyMapped        = isFullyMapped;
            ReceptiveFieldWidth  = receptiveFieldWidth;
            ReceptiveFieldHeight = receptiveFieldHeight;
            PreviousLayer        = previousLayer;
            LockedWeights        = lockedWeights;


            Neurons = new Neuron[NeuronCount];
            for (int i = 0; i < NeuronCount; i++)
            {
                Neurons[i] = new Neuron();
            }
            //NeuronPartitioner = Partitioner.Create(0, NeuronCount);
            useNeuronPartitioner = NeuronCount > 500;

            int[] kernelTemplate;
            int   iNumWeight = 0;
            int   position   = 0;

            switch (LayerType)
            {
            case LayerType.Input:
                ActivationFunction = ActivationFunction.None;
                WeightCount        = 0;
                Weights            = null;
                break;

            case LayerType.Convolutional:
                int totalMappings;
                if (UseMapInfo)
                {
                    if (IsFullyMapped)
                    {
                        totalMappings = PreviousLayer.MapCount * MapCount;
                    }
                    else
                    {
                        Mappings = mappings;
                        if (Mappings != null)
                        {
                            if (Mappings.Mapping.Count() == PreviousLayer.MapCount * MapCount)
                            {
                                totalMappings = Mappings.Mapping.Count(p => p == true);
                            }
                            else
                            {
                                throw new ArgumentException("Invalid mappings definition");
                            }
                        }
                        else
                        {
                            throw new ArgumentException("Empty mappings definition");
                        }
                    }

                    WeightCount = (totalMappings * ReceptiveFieldWidth * ReceptiveFieldHeight) + MapCount;
                    Weights     = new Weight[WeightCount];

                    kernelTemplate = new int[ReceptiveFieldWidth * ReceptiveFieldHeight];
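                    // Fill kernelTemplate with the flat offset, inside one map of the
                    // previous layer, of every cell of the receptive field (row-major).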
                    Parallel.For(0, ReceptiveFieldHeight, Network.ParallelOption, row =>
                    {
                        for (int column = 0; column < ReceptiveFieldWidth; column++)
                        {
                            kernelTemplate[column + (row * ReceptiveFieldWidth)] = column + (row * PreviousLayer.MapWidth);
                        }
                    });

                    int positionPrevMap = 0;
                    iNumWeight = 0;
                    int mapping    = 0;
                    int prevCurMap = -1;
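                    // Shared-weight layout: each active mapping consumes
                    // ReceptiveFieldWidth * ReceptiveFieldHeight kernel weights, and each
                    // destination map gets one extra bias the first time it is wired up.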
                    if (!IsFullyMapped)     // not fully mapped
                    {
                        for (int curMap = 0; curMap < MapCount; curMap++)
                        {
                            for (int prevMap = 0; prevMap < PreviousLayer.MapCount; prevMap++)
                            {
                                positionPrevMap = prevMap * PreviousLayer.MapWidth * PreviousLayer.MapHeight;

                                if (mappings.IsMapped(curMap, prevMap, MapCount))
                                {
                                    for (int y = 0; y < MapHeight; y++)
                                    {
                                        for (int x = 0; x < MapWidth; x++)
                                        {
                                            position   = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                            iNumWeight = (mapping * (ReceptiveFieldWidth * ReceptiveFieldHeight)) + curMap;
                                            if (prevCurMap != curMap)
                                            {
                                                Neurons[position].AddBias(iNumWeight++);
                                            }

                                            for (int i = 0; i < (ReceptiveFieldWidth * ReceptiveFieldHeight); i++)
                                            {
                                                Neurons[position].AddConnection(x + (y * PreviousLayer.MapWidth) + kernelTemplate[i] + positionPrevMap, iNumWeight++);
                                            }
                                        }
                                    }
                                    mapping++;
                                    prevCurMap = curMap;
                                }
                            }
                        }
                    }
                    else     // Fully mapped
                    {
                        if (totalMappings > MapCount)
                        {
                            for (int curMap = 0; curMap < MapCount; curMap++)
                            {
                                for (int prevMap = 0; prevMap < PreviousLayer.MapCount; prevMap++)
                                {
                                    positionPrevMap = prevMap * PreviousLayer.MapWidth * PreviousLayer.MapHeight;

                                    for (int y = 0; y < MapHeight; y++)
                                    {
                                        for (int x = 0; x < MapWidth; x++)
                                        {
                                            position   = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                            iNumWeight = (mapping * ReceptiveFieldWidth * ReceptiveFieldHeight) + curMap;

                                            if (prevCurMap != curMap)
                                            {
                                                Neurons[position].AddBias(iNumWeight++);
                                            }

                                            for (int i = 0; i < (ReceptiveFieldWidth * ReceptiveFieldHeight); i++)
                                            {
                                                Neurons[position].AddConnection(x + (y * PreviousLayer.MapWidth) + kernelTemplate[i] + positionPrevMap, iNumWeight++);
                                            }
                                        }
                                    }
                                    mapping++;
                                    prevCurMap = curMap;
                                }
                            }
                        }
                        else     // PreviousLayer has only one map
                        {
                            for (int curMap = 0; curMap < MapCount; curMap++)
                            {
                                for (int y = 0; y < MapHeight; y++)
                                {
                                    for (int x = 0; x < MapWidth; x++)
                                    {
                                        position   = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                        iNumWeight = curMap * ((ReceptiveFieldWidth * ReceptiveFieldHeight) + 1);

                                        Neurons[position].AddBias(iNumWeight++);

                                        for (int i = 0; i < (ReceptiveFieldWidth * ReceptiveFieldHeight); i++)
                                        {
                                            Neurons[position].AddConnection(x + (y * PreviousLayer.MapWidth) + kernelTemplate[i], iNumWeight++);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                else
                {
                    throw new ArgumentException("Inadequate mapping information provided");
                }
                break;

            case LayerType.ConvolutionalSubsampling:      // Simard's implementation
                if (UseMapInfo)
                {
                    if (IsFullyMapped)
                    {
                        totalMappings = PreviousLayer.MapCount * MapCount;
                    }
                    else
                    {
                        Mappings = mappings;
                        if (Mappings != null)
                        {
                            if (Mappings.Mapping.Count() == PreviousLayer.MapCount * MapCount)
                            {
                                totalMappings = Mappings.Mapping.Count(p => p == true);
                            }
                            else
                            {
                                throw new ArgumentException("Invalid mappings definition");
                            }
                        }
                        else
                        {
                            throw new ArgumentException("Empty mappings definition");
                        }
                    }

                    WeightCount = (totalMappings * ReceptiveFieldWidth * ReceptiveFieldHeight) + MapCount;
                    Weights     = new Weight[WeightCount];

                    kernelTemplate = new int[ReceptiveFieldWidth * ReceptiveFieldHeight];
                    Parallel.For(0, ReceptiveFieldHeight, Network.ParallelOption, row =>
                    {
                        for (int column = 0; column < ReceptiveFieldWidth; column++)
                        {
                            kernelTemplate[column + (row * ReceptiveFieldWidth)] = column + (row * PreviousLayer.MapWidth);
                        }
                    });

                    int positionPrevMap = 0;
                    iNumWeight = 0;
                    int mapping    = 0;
                    int prevCurMap = -1;
                    if (!IsFullyMapped)     // not fully mapped
                    {
                        for (int curMap = 0; curMap < MapCount; curMap++)
                        {
                            for (int prevMap = 0; prevMap < PreviousLayer.MapCount; prevMap++)
                            {
                                positionPrevMap = prevMap * PreviousLayer.MapWidth * PreviousLayer.MapHeight;

                                if (mappings.IsMapped(curMap, prevMap, MapCount))
                                {
                                    for (int y = 0; y < MapHeight; y++)
                                    {
                                        for (int x = 0; x < MapWidth; x++)
                                        {
                                            position   = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                            iNumWeight = (mapping * (ReceptiveFieldWidth * ReceptiveFieldHeight)) + curMap;
                                            if (prevCurMap != curMap)
                                            {
                                                Neurons[position].AddBias(iNumWeight++);
                                            }

                                            for (int i = 0; i < (ReceptiveFieldWidth * ReceptiveFieldHeight); i++)
                                            {
                                                Neurons[position].AddConnection((x * 2) + (y * 2 * PreviousLayer.MapWidth) + kernelTemplate[i] + positionPrevMap, iNumWeight++);
                                            }
                                        }
                                    }
                                    mapping++;
                                    prevCurMap = curMap;
                                }
                            }
                        }
                    }
                    else     // Fully mapped
                    {
                        if (totalMappings > MapCount)
                        {
                            for (int curMap = 0; curMap < MapCount; curMap++)
                            {
                                for (int prevMap = 0; prevMap < PreviousLayer.MapCount; prevMap++)
                                {
                                    positionPrevMap = prevMap * PreviousLayer.MapWidth * PreviousLayer.MapHeight;

                                    for (int y = 0; y < MapHeight; ++y)
                                    {
                                        for (int x = 0; x < MapWidth; ++x)
                                        {
                                            position   = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                            iNumWeight = (mapping * ReceptiveFieldWidth * ReceptiveFieldHeight) + curMap;

                                            if (prevCurMap != curMap)
                                            {
                                                Neurons[position].AddBias(iNumWeight++);
                                            }

                                            for (int i = 0; i < (ReceptiveFieldWidth * ReceptiveFieldHeight); i++)
                                            {
                                                Neurons[position].AddConnection((x * 2) + (y * 2 * PreviousLayer.MapWidth) + kernelTemplate[i] + positionPrevMap, iNumWeight++);
                                            }
                                        }
                                    }
                                    mapping++;
                                    prevCurMap = curMap;
                                }
                            }
                        }
                        else     // PreviousLayer has only one map
                        {
                            for (int curMap = 0; curMap < MapCount; curMap++)
                            {
                                for (int y = 0; y < MapHeight; y++)
                                {
                                    for (int x = 0; x < MapWidth; x++)
                                    {
                                        position   = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                        iNumWeight = curMap * ((ReceptiveFieldWidth * ReceptiveFieldHeight) + 1);

                                        Neurons[position].AddBias(iNumWeight++);

                                        for (int i = 0; i < (ReceptiveFieldWidth * ReceptiveFieldHeight); i++)
                                        {
                                            Neurons[position].AddConnection((x * 2) + (y * 2 * PreviousLayer.MapWidth) + kernelTemplate[i], iNumWeight++);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                else
                {
                    throw new ArgumentException("Inadequate mapping information provided");
                }
                break;

            case LayerType.Subsampling:
                if (UseMapInfo)
                {
                    if (IsFullyMapped)
                    {
                        // Symmetrical mapping
                        List <bool> mapCombinations = new List <bool>(PreviousLayer.MapCount * MapCount);
                        for (int x = 0; x < MapCount; x++)
                        {
                            for (int y = 0; y < PreviousLayer.MapCount; y++)
                            {
                                mapCombinations.Add(x == y);
                            }
                        }
                        mappings = new Mappings(mapCombinations);
                    }

                    Mappings = mappings;
                    if (Mappings != null)
                    {
                        if (Mappings.Mapping.Count() == PreviousLayer.MapCount * MapCount)
                        {
                            totalMappings = Mappings.Mapping.Count(p => p == true);
                        }
                        else
                        {
                            throw new ArgumentException("Invalid mappings definition");
                        }
                    }
                    else
                    {
                        throw new ArgumentException("Empty mappings definition");
                    }

                    // Subsampling uses weight sharing: each map has just one bias and one
                    // shared scaling coefficient, hence two weights per map.
                    WeightCount = MapCount * 2;
                    Weights     = new Weight[WeightCount];

                    SubsamplingScalingFactor = 1D / (ReceptiveFieldWidth * ReceptiveFieldHeight);

                    kernelTemplate = new int[ReceptiveFieldWidth * ReceptiveFieldHeight];
                    Parallel.For(0, ReceptiveFieldHeight, Network.ParallelOption, row =>
                    {
                        for (int column = 0; column < ReceptiveFieldWidth; column++)
                        {
                            kernelTemplate[column + (row * ReceptiveFieldWidth)] = column + (row * PreviousLayer.MapWidth);
                        }
                    });

                    int positionPrevMap = 0;
                    iNumWeight = 0;
                    if (PreviousLayer.MapCount > 1)     // previous layer has multiple maps
                    {
                        for (int curMap = 0; curMap < MapCount; curMap++)
                        {
                            for (int prevMap = 0; prevMap < PreviousLayer.MapCount; prevMap++)
                            {
                                positionPrevMap = prevMap * PreviousLayer.MapWidth * PreviousLayer.MapHeight;

                                if (mappings.IsMapped(curMap, prevMap, MapCount))
                                {
                                    for (int y = 0; y < MapHeight; y++)
                                    {
                                        for (int x = 0; x < MapWidth; x++)
                                        {
                                            position   = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                            iNumWeight = curMap * 2;
                                            Neurons[position].AddBias(iNumWeight++);

                                            for (int i = 0; i < (ReceptiveFieldWidth * ReceptiveFieldHeight); i++)
                                            {
                                                Neurons[position].AddConnection((x * ReceptiveFieldWidth) + (y * ReceptiveFieldHeight * PreviousLayer.MapWidth) + kernelTemplate[i] + positionPrevMap, iNumWeight);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    else     // previous layer has only one map
                    {
                        for (int curMap = 0; curMap < MapCount; curMap++)
                        {
                            for (int y = 0; y < MapHeight; y++)
                            {
                                for (int x = 0; x < MapWidth; x++)
                                {
                                    position   = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                    iNumWeight = curMap * 2;

                                    Neurons[position].AddBias(iNumWeight++);

                                    for (int i = 0; i < (ReceptiveFieldWidth * ReceptiveFieldHeight); i++)
                                    {
                                        Neurons[position].AddConnection((x * ReceptiveFieldWidth) + (y * ReceptiveFieldHeight * PreviousLayer.MapWidth) + kernelTemplate[i], iNumWeight);
                                    }
                                }
                            }
                        }
                    }
                }
                break;

            case LayerType.FullyConnected:
                WeightCount = (PreviousLayer.NeuronCount + 1) * NeuronCount;
                Weights     = new Weight[WeightCount];

                iNumWeight = 0;
                if (UseMapInfo)
                {
                    for (int curMap = 0; curMap < MapCount; curMap++)
                    {
                        for (int yc = 0; yc < MapHeight; yc++)
                        {
                            for (int xc = 0; xc < MapWidth; xc++)
                            {
                                position = xc + (yc * MapWidth) + (curMap * MapWidth * MapHeight);
                                Neurons[position].AddBias(iNumWeight++);

                                for (int prevMaps = 0; prevMaps < PreviousLayer.MapCount; prevMaps++)
                                {
                                    for (int y = 0; y < PreviousLayer.MapHeight; y++)
                                    {
                                        for (int x = 0; x < PreviousLayer.MapWidth; x++)
                                        {
                                            Neurons[position].AddConnection((x + (y * PreviousLayer.MapWidth) + (prevMaps * PreviousLayer.MapWidth * PreviousLayer.MapHeight)), iNumWeight++);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                else
                {
                    for (int y = 0; y < NeuronCount; y++)
                    {
                        Neurons[y].AddBias(iNumWeight++);
                        for (int x = 0; x < PreviousLayer.NeuronCount; x++)
                        {
                            Neurons[y].AddConnection(x, iNumWeight++);
                        }
                    }
                }
                break;

            case LayerType.RBF:
                WeightCount = PreviousLayer.NeuronCount * NeuronCount;     // no biases
                Weights     = new Weight[WeightCount];

                iNumWeight = 0;
                if (UseMapInfo)
                {
                    for (int n = 0; n < NeuronCount; n++)
                    {
                        for (int prevMaps = 0; prevMaps < PreviousLayer.MapCount; prevMaps++)
                        {
                            for (int y = 0; y < PreviousLayer.MapHeight; y++)
                            {
                                for (int x = 0; x < PreviousLayer.MapWidth; x++)
                                {
                                    Neurons[n].AddConnection((x + (y * PreviousLayer.MapWidth) + (prevMaps * PreviousLayer.MapWidth * PreviousLayer.MapHeight)), iNumWeight++);
                                }
                            }
                        }
                    }
                }
                else
                {
                    for (int y = 0; y < NeuronCount; y++)
                    {
                        for (int x = 0; x < PreviousLayer.NeuronCount; x++)
                        {
                            Neurons[y].AddConnection(x, iNumWeight++);
                        }
                    }
                }
                break;
            }

            //if (WeightCount > 0)
            //{
            //    WeightPartitioner = Partitioner.Create(0, WeightCount);
            //}
            useWeightPartitioner = WeightCount > 1000;

            int conn = 0;

            foreach (Neuron neuron in Neurons)
            {
                conn += neuron.Connections.Count();
            }

            Name += "Layer: " + LayerIndex.ToString(CultureInfo.CurrentCulture) + "\r\n";
            Name += "Layer Type: " + LayerType.ToString() + "\r\n" +
                    ((LayerType == LayerType.Input) ? ("") : ("Activation Function: " + ActivationFunction.ToString() + "\r\n")) +
                    ((LayerType == LayerType.Convolutional || LayerType == LayerType.Subsampling) ? ("Receptive Field: " + ReceptiveFieldWidth.ToString(CultureInfo.CurrentCulture) + "x" + ReceptiveFieldHeight.ToString(CultureInfo.CurrentCulture) + "\r\n") : "") +
                    ((UseMapInfo) ? ("Maps: " + MapCount.ToString(CultureInfo.CurrentCulture) + "x(" + MapWidth.ToString(CultureInfo.CurrentCulture) + "x" + MapHeight.ToString(CultureInfo.CurrentCulture) + ")" + "\r\n") : ("")) +
                    "Neurons: " + NeuronCount.ToString(CultureInfo.CurrentCulture) + "\r\n" +
                    ((LayerType != LayerType.Input) ? ("Weights: " + Weights.Count().ToString(CultureInfo.CurrentCulture) + "\r\n") : ("")) +
                    ((LayerType == LayerType.Input) ? ("") : ("Connections: " + conn.ToString(CultureInfo.CurrentCulture) + "\r\n"));


            if (PreviousLayer != null)
            {
                PreviousLayer.NextLayer = this;
            }
        }
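
For orientation, here is a minimal usage sketch (not part of the source above) showing how the convenience overloads from Examples #1-#4 chain into the full constructor to build a small LeNet-style topology. It assumes a parameterless NeuralNetwork constructor and that network.Layers is a writable IList<Layer>; how layers are actually registered with the network is not shown in this listing, so the Add calls below are an assumption.

 var network = new NeuralNetwork();   // assumed constructor; registration of layers is also assumed

 // Input plane: 1 map of 32x32 pixels (overload from Example #3).
 network.Layers.Add(new Layer(network, LayerType.Input, 1, 32, 32));

 // Convolution: 6 maps of 28x28 over a 5x5 receptive field (overload from Example #1).
 network.Layers.Add(new Layer(network, LayerType.Convolutional, ActivationFunction.Tanh, 6, 28, 28, 5, 5));

 // Subsampling: 6 maps of 14x14 with a 2x2 receptive field (overload from Example #1).
 network.Layers.Add(new Layer(network, LayerType.Subsampling, ActivationFunction.Tanh, 6, 14, 14, 2, 2));

 // Fully connected output of 10 neurons; mappings are ignored for this layer type (overload from Example #4).
 network.Layers.Add(new Layer(network, LayerType.FullyConnected, ActivationFunction.Tanh, 10, null));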