Code example #1
File: NeuralNetworks.cs  Project: PkuRainBow/leafasis
        public static NeuralNetworks Load(string fileName, bool includeWeights = false)
        {
            NeuralNetworks network = null;

            if (fileName.Contains("-gz"))
            {
                using (NeuralNetworkDataSet ds = new NeuralNetworkDataSet())
                {
                    using (FileStream inFile = File.OpenRead(fileName))
                    {
                        using (GZipStream decompress = new GZipStream(inFile, CompressionMode.Decompress))
                        {
                            ds.ReadXml(decompress, XmlReadMode.ReadSchema);
                        }
                    }

                    if (ds.NeuralNetworks.Rows.Count == 1)
                    {
                        network = new NeuralNetworks();

                        NeuralNetworkDataSet.NeuralNetworksRow networkRow = ds.NeuralNetworks.First();
                        network.Id = networkRow.NetworkId;
                        network.Name = networkRow.Name;
                        network.TrainToValue = networkRow.TrainTo;
                        network.LossFunction = (LossFunctions)networkRow.LossFunction;
                        network.CreatedOn = networkRow.CreatedOn;
                        network.dMicron = networkRow.DMicron;

                        Layers layer;
                        Layers previousLayer = null;
                        foreach (NeuralNetworkDataSet.LayersRow layerRow in networkRow.GetLayersRows())
                        {
                            List<bool> isMapped = new List<bool>();
                            foreach (NeuralNetworkDataSet.MappingsRow mappingRow in layerRow.GetMappingsRows())
                            {
                                isMapped.Add(mappingRow.IsMapped);
                            }

                            Mappings mappings = null;
                            if (isMapped.Count > 0)
                                mappings = new Mappings(network, layerRow.LayerIndex, isMapped);

                            layer = new Layers(network, layerRow.LayerIndex, (LayerTypes)layerRow.LayerType, (KernelTypes)layerRow.KernelType, layerRow.NeuronCount, layerRow.UseMapInfo, layerRow.MapCount, layerRow.MapWidth, layerRow.MapHeight, layerRow.IsFullyMapped, layerRow.ReceptiveFieldWidth, layerRow.ReceptiveFieldHeight, previousLayer, mappings, layerRow.LockedWeights);
                            if ((includeWeights) && (layerRow.GetWeightsRows().Count() > 0))
                            {
                                int i = 0;
                                foreach (NeuralNetworkDataSet.WeightsRow weightRow in layerRow.GetWeightsRows())
                                {
                                    layer.Weights[i].Value = weightRow.Value;
                                    layer.Weights[i].DiagonalHessian = weightRow.DiagonalHessian;
                                    i++;
                                }
                            }
                            network.Layers.Add(layer);
                            previousLayer = layer;
                        }

                        if (!includeWeights)
                            network.InitWeights(true);
                    }
                    else
                    {
                        InformationDialog.Show(null, "Invalid data format.", "Select a different file", "Information");
                    }
                }
            }

            return network;
        }
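A minimal usage sketch for the Load method above (the path and file name are illustrative assumptions; the method only parses files whose name contains "-gz" and returns null when nothing could be loaded):

        // Hypothetical caller; "lenet5-gz.xml" is an illustrative file name, not one taken from the project.
        NeuralNetworks network = NeuralNetworks.Load(@"C:\Networks\lenet5-gz.xml", includeWeights: true);
        if (network == null)
        {
            // Either the name did not contain "-gz" or the data set did not hold exactly one network row.
            return;
        }
        // network.Layers (and, with includeWeights, the stored weight values) are now populated.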
Code example #2
File: NeuralNetworks.cs  Project: PkuRainBow/leafasis
 // Convenience overload for map-based layers: the neuron count is derived as mapCount * mapWidth * mapHeight
 // and the previous layer is the last layer already added to the network (so the network must not be empty).
 public Layers(NeuralNetworks network, LayerTypes layerType, KernelTypes kernelType, int mapCount, int mapWidth, int mapHeight, int receptiveFieldWidth, int receptiveFieldHeight, Mappings mappings, bool lockedWeights = false)
     : this(network, network.Layers.Count, layerType, kernelType, mapCount * mapWidth * mapHeight, true, mapCount, mapWidth, mapHeight, false, receptiveFieldWidth, receptiveFieldHeight, network.Layers[network.Layers.Count - 1], mappings, lockedWeights)
 {
 }
Code example #3
File: NeuralNetworks.cs  Project: PkuRainBow/leafasis
        public Layers(NeuralNetworks network, int layerIndex, LayerTypes layerType, KernelTypes kernelType, int neuronCount, bool useMapInfo, int mapCount, int mapWidth, int mapHeight, bool isFullyMapped, int receptiveFieldWidth, int receptiveFieldHeight, Layers previousLayer, Mappings mappings, bool lockedWeights = false)
        {
            Network = network;
            LayerIndex = layerIndex;
            LayerType = layerType;
            KernelType = kernelType;
            NeuronCount = neuronCount;
            UseMapInfo = useMapInfo;
            MapCount = mapCount;
            MapWidth = mapWidth;
            MapHeight = mapHeight;
            IsFullyMapped = isFullyMapped;
            ReceptiveFieldWidth = receptiveFieldWidth;
            ReceptiveFieldHeight = receptiveFieldHeight;
            PreviousLayer = previousLayer;
            LockedWeights = lockedWeights;

            Neurons = new Neuron[NeuronCount];
            for (int i = 0; i < NeuronCount; i++)
            {
                Neurons[i] = new Neuron();
            }

            int[] kernelTemplate;
            int iNumWeight = 0;
            int position = 0;

            switch (LayerType)
            {
                case LayerTypes.Input:
                    WeightCount = 0;
                    Weights = new Weight[WeightCount];
                    break;

                case LayerTypes.Convolutional:
                    int totalMappings;
                    if (UseMapInfo)
                    {
                        if (IsFullyMapped)
                        {
                            totalMappings = PreviousLayer.MapCount * MapCount;
                        }
                        else
                        {
                            Mappings = mappings;
                            if (Mappings != null)
                            {
                                if (Mappings.IsMapped.Count() == PreviousLayer.MapCount * MapCount)
                                    totalMappings = Mappings.IsMapped.Count(p => p == true);
                                else
                                    throw new ArgumentException("Invalid mappings definition");
                            }
                            else
                                throw new ArgumentException("Empty mappings definition");
                        }

                        WeightCount = (totalMappings * ReceptiveFieldWidth * ReceptiveFieldHeight) + MapCount;
                        Weights = new Weight[WeightCount];

                        kernelTemplate = new int[ReceptiveFieldWidth * ReceptiveFieldHeight];
                        for (int row = 0; row < ReceptiveFieldHeight; row++)
                        {
                            for (int column = 0; column < ReceptiveFieldWidth; column++)
                            {
                                kernelTemplate[column + (row * ReceptiveFieldWidth)] = column + (row * PreviousLayer.MapWidth);
                            }
                        }

                        int positionPrevMap = 0;
                        iNumWeight = 0;
                        int mapping = 0;
                        int prevCurMap = -1;
                        if (!IsFullyMapped) // not fully mapped
                        {
                            for (int curMap = 0; curMap < MapCount; ++curMap)
                            {
                                for (int prevMap = 0; prevMap < PreviousLayer.MapCount; ++prevMap)
                                {
                                    positionPrevMap = prevMap * PreviousLayer.MapWidth * PreviousLayer.MapHeight;

                                    if (mappings.IsMapped[(curMap * PreviousLayer.MapCount) + prevMap] == true)
                                    {
                                        for (int y = 0; y < MapHeight; ++y)
                                        {
                                            for (int x = 0; x < MapWidth; ++x)
                                            {
                                                position = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                                iNumWeight = (mapping * (ReceptiveFieldWidth * ReceptiveFieldHeight)) + curMap;
                                                if (prevCurMap != curMap)
                                                    Neurons[position].AddBias(iNumWeight++);

                                                for (int k = 0; k < (ReceptiveFieldWidth * ReceptiveFieldHeight); ++k)
                                                {
                                                    Neurons[position].AddConnection(x + (y * PreviousLayer.MapWidth) + kernelTemplate[k] + positionPrevMap, iNumWeight++);
                                                }
                                            }
                                        }
                                        mapping++;
                                        prevCurMap = curMap;
                                    }
                                }
                            }
                        }
                        else // Fully mapped
                        {
                            if (totalMappings > MapCount)
                            {
                                for (int curMap = 0; curMap < MapCount; curMap++)
                                {
                                    for (int prevMap = 0; prevMap < PreviousLayer.MapCount; prevMap++)
                                    {
                                        positionPrevMap = prevMap * PreviousLayer.MapWidth * PreviousLayer.MapHeight;

                                        for (int y = 0; y < MapHeight; y++)
                                        {
                                            for (int x = 0; x < MapWidth; x++)
                                            {
                                                position = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                                iNumWeight = (mapping * ReceptiveFieldWidth * ReceptiveFieldHeight) + curMap;

                                                if (prevCurMap != curMap)
                                                    Neurons[position].AddBias(iNumWeight++);

                                                for (int k = 0; k < (ReceptiveFieldWidth * ReceptiveFieldHeight); ++k)
                                                {
                                                    Neurons[position].AddConnection(x + (y * PreviousLayer.MapWidth) + kernelTemplate[k] + positionPrevMap, iNumWeight++);
                                                }
                                            }
                                        }
                                        mapping++;
                                        prevCurMap = curMap;
                                    }
                                }
                            }
                            else // PreviousLayer has only one map
                            {
                                for (int curMap = 0; curMap < MapCount; ++curMap)
                                {
                                    for (int y = 0; y < MapHeight; ++y)
                                    {
                                        for (int x = 0; x < MapWidth; ++x)
                                        {
                                            position = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                            iNumWeight = curMap * ((ReceptiveFieldWidth * ReceptiveFieldHeight) + 1);

                                            Neurons[position].AddBias(iNumWeight++);

                                            for (int k = 0; k < (ReceptiveFieldWidth * ReceptiveFieldHeight); ++k)
                                            {
                                                Neurons[position].AddConnection(x + (y * PreviousLayer.MapWidth) + kernelTemplate[k], iNumWeight++);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        throw new ArgumentException("Inadequate mapping information provided");
                    }
                    break;

                case LayerTypes.ConvolutionalSubsampling:  // Simard's implementation
                    if (UseMapInfo)
                    {
                        if (IsFullyMapped)
                        {
                            totalMappings = PreviousLayer.MapCount * MapCount;
                        }
                        else
                        {
                            Mappings = mappings;
                            if (Mappings != null)
                            {
                                if (Mappings.IsMapped.Count() == PreviousLayer.MapCount * MapCount)
                                    totalMappings = Mappings.IsMapped.Count(p => p == true);
                                else
                                    throw new ArgumentException("Invalid mappings definition");
                            }
                            else
                                throw new ArgumentException("Empty mappings definition");
                        }

                        WeightCount = (totalMappings * ReceptiveFieldWidth * ReceptiveFieldHeight) + MapCount;
                        Weights = new Weight[WeightCount];

                        kernelTemplate = new int[ReceptiveFieldWidth * ReceptiveFieldHeight];
                        for (int row = 0; row < ReceptiveFieldHeight; ++row)
                        {
                            for (int column = 0; column < ReceptiveFieldWidth; ++column)
                            {
                                kernelTemplate[column + (row * ReceptiveFieldWidth)] = column + (row * PreviousLayer.MapWidth);
                            }
                        }

                        int positionPrevMap = 0;
                        iNumWeight = 0;
                        int mapping = 0;
                        int prevCurMap = -1;
                        if (!IsFullyMapped) // not fully mapped
                        {
                            for (int curMap = 0; curMap < MapCount; ++curMap)
                            {
                                for (int prevMap = 0; prevMap < PreviousLayer.MapCount; ++prevMap)
                                {
                                    positionPrevMap = prevMap * PreviousLayer.MapWidth * PreviousLayer.MapHeight;

                                    if (mappings.IsMapped[(curMap * PreviousLayer.MapCount) + prevMap] == true)
                                    {
                                        for (int y = 0; y < MapHeight; ++y)
                                        {
                                            for (int x = 0; x < MapWidth; ++x)
                                            {
                                                position = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                                iNumWeight = (mapping * (ReceptiveFieldWidth * ReceptiveFieldHeight)) + curMap;
                                                if (prevCurMap != curMap)
                                                    Neurons[position].AddBias(iNumWeight++);

                                                for (int k = 0; k < (ReceptiveFieldWidth * ReceptiveFieldHeight); ++k)
                                                {
                                                    Neurons[position].AddConnection((x * 2)+ (y * 2 * PreviousLayer.MapWidth) + kernelTemplate[k] + positionPrevMap, iNumWeight++);
                                                }
                                            }
                                        }
                                        mapping++;
                                        prevCurMap = curMap;
                                    }
                                }
                            }
                        }
                        else // Fully mapped
                        {
                            if (totalMappings > MapCount)
                            {
                                for (int curMap = 0; curMap < MapCount; ++curMap)
                                {
                                    for (int prevMap = 0; prevMap < PreviousLayer.MapCount; ++prevMap)
                                    {
                                        positionPrevMap = prevMap * PreviousLayer.MapWidth * PreviousLayer.MapHeight;

                                        for (int y = 0; y < MapHeight; ++y)
                                        {
                                            for (int x = 0; x < MapWidth; ++x)
                                            {
                                                position = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                                iNumWeight = (mapping * ReceptiveFieldWidth * ReceptiveFieldHeight) + curMap;

                                                if (prevCurMap != curMap)
                                                    Neurons[position].AddBias(iNumWeight++);

                                                for (int k = 0; k < (ReceptiveFieldWidth * ReceptiveFieldHeight); ++k)
                                                {
                                                    Neurons[position].AddConnection((x * 2) + (y * 2 * PreviousLayer.MapWidth) + kernelTemplate[k] + positionPrevMap, iNumWeight++);
                                                }
                                            }
                                        }
                                        mapping++;
                                        prevCurMap = curMap;
                                    }
                                }
                            }
                            else // PreviousLayer has only one map
                            {
                                for (int curMap = 0; curMap < MapCount; ++curMap)
                                {
                                    for (int y = 0; y < MapHeight; ++y)
                                    {
                                        for (int x = 0; x < MapWidth; ++x)
                                        {
                                            position = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                            iNumWeight = curMap * ((ReceptiveFieldWidth * ReceptiveFieldHeight) + 1);

                                            Neurons[position].AddBias(iNumWeight++);

                                            for (int k = 0; k < (ReceptiveFieldWidth * ReceptiveFieldHeight); ++k)
                                            {
                                                Neurons[position].AddConnection((x * 2) + (y * 2 * PreviousLayer.MapWidth) + kernelTemplate[k], iNumWeight++);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        throw new ArgumentException("Inadequate mapping information provided");
                    }
                    break;

                case LayerTypes.Subsampling:
                    if (UseMapInfo)
                    {
                        if (IsFullyMapped)
                        {
                            // Symmetrical mapping
                            List<bool> mapCombinations = new List<bool>(PreviousLayer.MapCount * MapCount);
                            for (int x = 0; x < MapCount; x++)
                            {
                                for (int y = 0; y < PreviousLayer.MapCount; y++)
                                {
                                    mapCombinations.Add(x == y);
                                }
                            }
                            mappings = new Mappings(network, PreviousLayer.LayerIndex, mapCombinations);
                        }

                        Mappings = mappings;
                        if (Mappings != null)
                        {
                            if (Mappings.IsMapped.Count() == PreviousLayer.MapCount * MapCount)
                                totalMappings = Mappings.IsMapped.Count(p => p == true);
                            else
                                throw new ArgumentException("Invalid mappings definition");
                        }
                        else
                            throw new ArgumentException("Empty mappings definition");

                        WeightCount = MapCount * 2;
                        Weights = new Weight[WeightCount];

                        SubsamplingScalingFactor = 1D / (ReceptiveFieldWidth * ReceptiveFieldHeight);

                        kernelTemplate = new int[ReceptiveFieldWidth * ReceptiveFieldHeight];
                        for (int row = 0; row < ReceptiveFieldHeight; ++row)
                        {
                            for (int column = 0; column < ReceptiveFieldWidth; ++column)
                            {
                                kernelTemplate[column + (row * ReceptiveFieldWidth)] = column + (row * PreviousLayer.MapWidth);
                            }
                        }

                        int positionPrevMap = 0;
                        iNumWeight = 0;
                        if (PreviousLayer.MapCount > 1) // symmetrically mapped across multiple previous maps
                        {
                            for (int curMap = 0; curMap < MapCount; ++curMap)
                            {
                                for (int prevMap = 0; prevMap < PreviousLayer.MapCount; ++prevMap)
                                {
                                    positionPrevMap = prevMap * PreviousLayer.MapWidth * PreviousLayer.MapHeight;

                                    if (mappings.IsMapped[(curMap * PreviousLayer.MapCount) + prevMap] == true)
                                    {
                                        for (int y = 0; y < MapHeight; ++y)
                                        {
                                            for (int x = 0; x < MapWidth; ++x)
                                            {
                                                position = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                                iNumWeight = curMap * 2;
                                                Neurons[position].AddBias(iNumWeight++);

                                                for (int k = 0; k < (ReceptiveFieldWidth * ReceptiveFieldHeight); ++k)
                                                {
                                                    Neurons[position].AddConnection((x * ReceptiveFieldWidth) + (y * ReceptiveFieldHeight * PreviousLayer.MapWidth) + kernelTemplate[k] + positionPrevMap, iNumWeight);
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                        else // previous layer has only one map
                        {
                            for (int curMap = 0; curMap < MapCount; ++curMap)
                            {
                                for (int y = 0; y < MapHeight; ++y)
                                {
                                    for (int x = 0; x < MapWidth; ++x)
                                    {
                                        position = x + (y * MapWidth) + (curMap * MapWidth * MapHeight);
                                        iNumWeight = curMap * 2;

                                        Neurons[position].AddBias(iNumWeight++);

                                        for (int k = 0; k < (ReceptiveFieldWidth * ReceptiveFieldHeight); ++k)
                                        {
                                            Neurons[position].AddConnection((x * ReceptiveFieldWidth) + (y * ReceptiveFieldHeight * PreviousLayer.MapWidth) + kernelTemplate[k], iNumWeight);
                                        }
                                    }
                                }
                            }
                        }
                    }
                    break;

                case LayerTypes.FullyConnected:
                    WeightCount = (PreviousLayer.NeuronCount + 1) * NeuronCount;
                    Weights = new Weight[WeightCount];

                    iNumWeight = 0;
                    if (UseMapInfo)
                    {
                        for (int curMap = 0; curMap < MapCount; ++curMap)
                        {
                            for (int yc = 0;  yc < MapHeight; ++yc)
                            {
                                for (int xc = 0; xc < MapWidth; ++xc)
                                {
                                    position = xc + (yc * MapWidth) + (curMap * MapWidth * MapHeight);
                                    Neurons[position].AddBias(iNumWeight++);

                                    for (int prevMaps = 0; prevMaps < PreviousLayer.MapCount; ++prevMaps)
                                    {
                                        for (int y = 0; y < PreviousLayer.MapHeight; ++y)
                                        {
                                            for (int x = 0; x < PreviousLayer.MapWidth; ++x)
                                            {
                                                Neurons[position].AddConnection((x + (y * PreviousLayer.MapWidth) + (prevMaps * PreviousLayer.MapWidth * PreviousLayer.MapHeight)), iNumWeight++);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        for (int y = 0; y < NeuronCount; ++y)
                        {
                            Neurons[y].AddBias(iNumWeight++);
                            for (int x = 0; x < PreviousLayer.NeuronCount; ++x)
                            {
                                Neurons[y].AddConnection(x, iNumWeight++);
                            }
                        }
                    }
                    break;

                case LayerTypes.RBF:
                    WeightCount = PreviousLayer.NeuronCount * NeuronCount; // no biases
                    Weights = new Weight[WeightCount];

                    iNumWeight = 0;
                    if (UseMapInfo)
                    {
                        for (int n = 0; n < NeuronCount; ++n)
                        {
                            for (int prevMaps = 0; prevMaps < PreviousLayer.MapCount; ++prevMaps)
                            {
                                for (int y = 0; y < PreviousLayer.MapHeight; ++y)
                                {
                                    for (int x = 0; x < PreviousLayer.MapWidth; ++x)
                                    {
                                        Neurons[n].AddConnection((x + (y * PreviousLayer.MapWidth) + (prevMaps * PreviousLayer.MapWidth * PreviousLayer.MapHeight)), iNumWeight++);
                                    }
                                }
                            }
                        }
                    }
                    else
                    {
                        for (int y = 0; y < NeuronCount; ++y)
                        {
                            for (int x = 0; x < PreviousLayer.NeuronCount; ++x)
                            {
                                Neurons[y].AddConnection(x, iNumWeight++);
                            }
                        }
                    }
                    break;
            }

            int conn = 0;
            foreach (Neuron neuron in Neurons)
            {
                conn += neuron.Connections.Count();
            }

            Name += "Layer: " + LayerIndex.ToString(CultureInfo.CurrentCulture) + "\r\n";
            Name += "Layer type: " + LayerType.ToString() + "\r\n" +
                   ((KernelType != KernelTypes.None) ? ("Kernel type: " + KernelType.ToString() + "\r\n") : ("")) +
                   ((LayerType == LayerTypes.Convolutional) ? ("Receptive field: " + ReceptiveFieldWidth.ToString(CultureInfo.CurrentCulture) + "x" + ReceptiveFieldHeight.ToString(CultureInfo.CurrentCulture) + "\r\n") : "") +
                   ((UseMapInfo) ? ("Maps: " + MapCount.ToString(CultureInfo.CurrentCulture) + "x(" + MapWidth.ToString(CultureInfo.CurrentCulture) + "x" + MapHeight.ToString(CultureInfo.CurrentCulture) + ")" + "\r\n") : ("")) +
                   "Neurons: " + NeuronCount.ToString(CultureInfo.CurrentCulture) + "\r\n" +
                   ((LayerType != LayerTypes.Input) ? ("Weights: " + Weights.Count().ToString(CultureInfo.CurrentCulture) + "\r\n") : ("")) +
                   "Connections: " + conn.ToString(CultureInfo.CurrentCulture) + "\r\n";

            if (PreviousLayer != null)
            {
                PreviousLayer.NextLayer = this;
            }
        }
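As a worked check of the weight bookkeeping in the Convolutional branch above: WeightCount = (totalMappings * ReceptiveFieldWidth * ReceptiveFieldHeight) + MapCount, i.e. one shared kernel per active mapping plus one bias per map. With LeNet-5-style numbers (illustrative, not taken from the project) this reproduces the familiar 1,516 parameters of the C3 layer:

        // 6 previous maps, 16 current maps, a 5x5 receptive field and 60 active entries
        // in the mapping table (the classic LeNet-5 C3 connection scheme).
        int totalMappings = 60;
        int receptiveFieldWidth = 5, receptiveFieldHeight = 5;
        int mapCount = 16;
        int weightCount = (totalMappings * receptiveFieldWidth * receptiveFieldHeight) + mapCount; // 1516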
Code example #4
File: NeuralNetworks.cs  Project: PkuRainBow/leafasis
 // Convenience overload for layers without map information (e.g. fully connected or RBF layers):
 // the previous layer is the last layer already added to the network, or null when the network is still empty.
 public Layers(NeuralNetworks network, LayerTypes layerType, KernelTypes kernelType, int neuronCount, Mappings mappings, bool lockedWeights = false)
     : this(network, network.Layers.Count, layerType, kernelType, neuronCount, false, 1, 1, 1, false, 0, 0, ((network.Layers.Count == 0) ? (null) : (network.Layers[network.Layers.Count - 1])), mappings, lockedWeights)
 {
 }
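For reference, a minimal sketch (not part of the project) of how the constructors above might be combined into a Simard-style 29x29 convolutional-subsampling network. It assumes the parameterless NeuralNetworks constructor, network.Layers.Add and InitWeights(true) seen in code example #1 are accessible from the caller; the layer sizes are illustrative, and KernelTypes.None is used throughout because it is the only kernel type these excerpts confirm.

        NeuralNetworks net = new NeuralNetworks();

        // Input layer: a single 29x29 map, no weights (full constructor from code example #3).
        net.Layers.Add(new Layers(net, 0, LayerTypes.Input, KernelTypes.None, 29 * 29, true, 1, 29, 29, false, 0, 0, null, null));

        // Two convolutional-subsampling layers, fully mapped, 5x5 receptive fields sampled with stride 2.
        net.Layers.Add(new Layers(net, 1, LayerTypes.ConvolutionalSubsampling, KernelTypes.None, 6 * 13 * 13, true, 6, 13, 13, true, 5, 5, net.Layers[0], null));
        net.Layers.Add(new Layers(net, 2, LayerTypes.ConvolutionalSubsampling, KernelTypes.None, 50 * 5 * 5, true, 50, 5, 5, true, 5, 5, net.Layers[1], null));

        // Hidden and output layers via the convenience overload from code example #4 (no map info needed).
        net.Layers.Add(new Layers(net, LayerTypes.FullyConnected, KernelTypes.None, 100, null));
        net.Layers.Add(new Layers(net, LayerTypes.FullyConnected, KernelTypes.None, 10, null));

        // Randomize the weights, as Load does when the file carries no weight rows.
        net.InitWeights(true);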