Example no. 1
        public void TSSetUp()
        {
            int[] layers = new int[] {
                1024, //32x32 input image
                4704, //6 @ 28x28 convolution
                1176, //6 @  14x14 subsampling
                1600, //16 @ 10x10 convolution
                400,  //16 @ 5x5 subsampling
                120,  // 120x1x1 convolution!
                84,   //full
                10    //full
            };
            LayerConnector[] map = new LayerConnector[] {
                new ConvolutionAuto(5, 6, 1, 4),
                new SubSampling(6),
                new Convolution(5, 16, 6, 4, GetSchema()),
                new SubSampling(16),
                new ConvolutionAuto(5, 120, 16, 0),
                new FullLayerConnector(),
                new FullLayerConnector()
            };
            ConvolutionalTopology  topology  = new ConvolutionalTopology(layers, 1, map);
            ConvolutionalGenerator generator = new ConvolutionalGenerator();

            Network = generator.Create(topology);
        }
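
Each entry in the layers array is the total neuron count of the corresponding stage, i.e. feature maps × width × height: 4704 = 6 × 28 × 28, 1176 = 6 × 14 × 14, 1600 = 16 × 10 × 10 and 400 = 16 × 5 × 5. The topology reproduces the classic LeNet-5 layout for 32×32 input images.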
Example no. 2
        public static NetworkVisualizer CreateVisualizer(ConvolutionalNetwork network, string[] actionIndex)
        {
            var nv = GameObject.Instantiate(Resources.Load <NetworkVisualizer>("NetworkVisualizer"));

            nv.Init(network, actionIndex);
            return(nv);
        }
Example no. 3
 public ConvolutionalExecution(ILinearAlgebraProvider lap, ConvolutionalNetwork network)
 {
     foreach (var layer in network.ConvolutionalLayer)
     {
         if (layer.Type == ConvolutionalNetwork.ConvolutionalLayerType.Convolutional)
         {
             _convolutional.Add(new Convolutional(lap, layer));
         }
         else if (layer.Type == ConvolutionalNetwork.ConvolutionalLayerType.MaxPooling)
         {
             _convolutional.Add(new MaxPooling(layer.FilterWidth, layer.FilterHeight, layer.Stride));
         }
     }
     _feedForward = lap.NN.CreateFeedForward(network.FeedForward);
 }
Example no. 4
        private void button3_Click(object sender, EventArgs e)
        {
            OpenFileDialog dlg = new OpenFileDialog();

            dlg.CheckFileExists = true;
            dlg.CheckPathExists = true;
            dlg.Filter          = "xml files (*.xml)|*.xml";
            dlg.Multiselect     = false;
            if (dlg.ShowDialog() == DialogResult.OK)
            {
                ConvolutionalXMLSerializer serializer = new ConvolutionalXMLSerializer();
                Network             = serializer.Deserialize(XDocument.Load(dlg.FileName));
                Network.LearnFactor = 0.0005;
                Loaded = true;
            }
        }
Example no. 5
        private void Init(ConvolutionalNetwork cnn, string[] actionIndex)
        {
            _cnn = cnn;
            DontDestroyOnLoad(gameObject);
            var p = GetComponentInChildren <GridLayoutGroup>();

            _convLayers = _cnn.IterateSpatialLayers().Select(sl => new ConvLayerVisualizer(sl)).ToList();
            foreach (var l in _convLayers)
            {
                l.CreateUI().transform.SetParent(p.transform, false);
            }
            _denseLayer = new DenseLayerVisualizer(_cnn.IterateSpatialLayers().Last(), _cnn.CombinationLayer, _cnn.OutputLayer, actionIndex);
            foreach (var ui in _denseLayer.GetUI())
            {
                ui.transform.SetParent(p.transform, false);
            }
        }
Example no. 6
        void BuildNetwork()
        {
            int[] layers = new int[] {
                841,  //29x29 input image
                1014, //6 @ 13x13 convolution
                1250, //50 @ 5x5 convolution
                100,  //full
                10    //full
            };
            LayerConnector[] map = new LayerConnector[] {
                new ConvolutionAuto(5, 6, 1, 3),
                new ConvolutionAuto(5, 50, 6, 1),
                new FullLayerConnector(),
                new FullLayerConnector()
            };
            ConvolutionalTopology  topology  = new ConvolutionalTopology(layers, 1, map, new HyperbolicTangent());
            ConvolutionalGenerator generator = new ConvolutionalGenerator();

            Network             = generator.Create(topology);
            Network.LearnFactor = 0.0005;
            Network.Reset(-0.1, 0.1);
        }
Example no. 7
        public BackPropoginationNew()
        {
            var activation = ActivationType.BipolarSigmoid;
            var relu       = ActivationType.ReLu;

            Network = new ConvolutionalNetwork();

            Network.InitLayers(26, 26,
                               new ConvolutionalLayer(relu, 5, 3),  //24
                               new MaxPoolingLayer(2),              //12
                               new ConvolutionalLayer(relu, 10, 3), //10
                               new MaxPoolingLayer(2),              //5
                                                                    //new FullyConnectedLayer(100, activation),
                               new FullyConnectedLayer(50, activation),
                               new FullyConnectedLayer(50, activation),
                               new FullyConnectedLayer(1, activation)
                               );

            Network.Randomize();
        }
Example no. 8
        public static void testLearningOneLayer()
        {
            Dimension2D inputDimension = new Dimension2D(5, 5);

            Dimension2D convolutionKernel = new Dimension2D(3, 3);

            ConvolutionalNetwork convolutionNet = (new ConvolutionalNetwork.Builder()).withInputLayer(5, 5, 1).withConvolutionLayer(3, 3, 2).withFullConnectedLayer(2).build();


            // CREATE DATA SET

            DataSet dataSet = new DataSet(25, 2);

            dataSet.addRow(new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, new double[] { 1, 0 }); // horizontal bar (middle row of the 5x5 grid)
            dataSet.addRow(new double[] { 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0 }, new double[] { 0, 1 }); // vertical bar (middle column)

            // TRAIN NETWORK

            convolutionNet.LearningRule.MaxError = 0.00001;
            convolutionNet.learn(dataSet);

            Console.WriteLine("Done training!");

            FeatureMapLayer featureMap1 = ((FeatureMapsLayer)convolutionNet.getLayerAt(1)).getFeatureMap(0);
            FeatureMapLayer featureMap2 = ((FeatureMapsLayer)convolutionNet.getLayerAt(1)).getFeatureMap(1);

            //        WeightVisualiser visualiser1 = new WeightVisualiser(featureMap1, convolutionKernel);
            //        visualiser1.displayWeights();
            //
            //        WeightVisualiser visualiser2 = new WeightVisualiser(featureMap2, convolutionKernel);
            //        visualiser2.displayWeights();


            // CREATE TEST SET

            DataSet testSet = new DataSet(25, 2);

            testSet.addRow(new double[] { 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, new double[] { 1, 0 }); // horizontal bar spanning rows 2-3
            testSet.addRow(new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0 }, new double[] { 1, 0 }); // horizontal bar spanning rows 3-4
            testSet.addRow(new double[] { 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0 }, new double[] { 0, 1 }); // vertical bar (middle three columns)
        }
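
The testSet built above is only constructed, not used, in this snippet. A minimal follow-up (a sketch, assuming the Evaluation helper shown in Example no. 9 is available in the same project) would score the trained network against it:

            // Evaluate the trained network on the held-out bar patterns
            Evaluation.runFullEvaluation(convolutionNet, testSet);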
Example no. 9
        /// <param name="args"> Command line parameters used to initialize parameters of convolutional network
        ///             [0] - maximum number of epochs during learning
        ///             [1] - learning error stop condition
        ///             [2] - learning rate used during learning process
        ///             [3] - number of feature maps in 1st convolutional layer
        ///             [4] - number of feature maps in 2nd convolutional layer
        ///             [5] - number of feature maps in 3rd convolutional layer </param>
        public static void Main(string[] args)
        {
            try
            {
                int    maxIter      = 10000;         // Integer.parseInt(args[0]);
                double maxError     = 0.01;          //Double.parseDouble(args[1]);
                double learningRate = 0.2;           //  Double.parseDouble(args[2]);

                int layer1 = Convert.ToInt32(args[3]);
                int layer2 = Convert.ToInt32(args[4]);
                int layer3 = Convert.ToInt32(args[5]);

                LOG.info("{}-{}-{}", layer1, layer2, layer3);

                DataSet trainSet = MNISTDataSet.createFromFile(MNISTDataSet.TRAIN_LABEL_NAME, MNISTDataSet.TRAIN_IMAGE_NAME, 100);
                DataSet testSet  = MNISTDataSet.createFromFile(MNISTDataSet.TEST_LABEL_NAME, MNISTDataSet.TEST_IMAGE_NAME, 10000);

                Dimension2D inputDimension    = new Dimension2D(32, 32);
                Dimension2D convolutionKernel = new Dimension2D(5, 5);
                Dimension2D poolingKernel     = new Dimension2D(2, 2);

                ConvolutionalNetwork convolutionNetwork = (new ConvolutionalNetwork.Builder()).withInputLayer(32, 32, 1).withConvolutionLayer(5, 5, layer1).withPoolingLayer(2, 2).withConvolutionLayer(5, 5, layer2).withPoolingLayer(2, 2).withConvolutionLayer(5, 5, layer3).withFullConnectedLayer(10).build();

                ConvolutionalBackpropagation backPropagation = new ConvolutionalBackpropagation();
                backPropagation.LearningRate  = learningRate;
                backPropagation.MaxError      = maxError;
                backPropagation.MaxIterations = maxIter;
                backPropagation.addListener(new LearningListener(convolutionNetwork, testSet));
                backPropagation.ErrorFunction = new MeanSquaredError();

                convolutionNetwork.LearningRule = backPropagation;
                convolutionNetwork.learn(trainSet);

                Evaluation.runFullEvaluation(convolutionNetwork, testSet);
            }
            catch (IOException e)
            {
                Console.WriteLine(e.ToString());
                Console.Write(e.StackTrace);
            }
        }
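
As the doc comment indicates, the three feature-map counts are taken from the command line, e.g. passing 10000 0.01 0.2 6 16 120 as arguments; note that in this snippet only args[3]-args[5] are actually read, while the epoch limit, error threshold and learning rate are hardcoded.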
Example no. 10
        public ConvolutionalBackPropagationLearning(ConvolutionalNetwork network)
        {
            this.network = network;

            var matrixLayers = network.Layers.Where(x => x.Type == LayerType.Convolution || x.Type == LayerType.MaxPoolingLayer).Select(x => x as IMatrixLayer).ToArray();

            fullyConnectedLayers = network.Layers.Where(x => x.Type == LayerType.FullyConnected).Select(x => x as IFullyConnectedLayer).ToArray();

            fullyConnectedNeuronErrors = new double[fullyConnectedLayers.Length][];
            convNeuronErrors           = new double[matrixLayers.Length][][, ];

            for (var i = 0; i < fullyConnectedLayers.Length; i++)
            {
                fullyConnectedNeuronErrors[i] = new double[fullyConnectedLayers[i].NeuronsCount];
            }

            // Error buffers for a convolution/pooling layer are sized from the next matrix layer's
            // output count; the last matrix layer uses its own output count.
            for (var i = 0; i < matrixLayers.Length; i++)
            {
                convNeuronErrors[i] = new double[matrixLayers[i == matrixLayers.Length - 1 ? i : i + 1].Outputs.Length][, ];
            }
        }
Example no. 11
        public override void Initialize(int gridSize, int vectorSize, int depth)
        {
            // Action-index mapping.
            _amap = new Dictionary <string, int>();
            int ix = 0;

            foreach (QAction a in Actions)
            {
                _amap[a.ActionId] = ix++;
            }
            // Model.
            if (_remake)
            {
                _net = new ConvolutionalNetwork(gridSize, vectorSize, depth, _amap.Count, _networkArgs);
            }
            else
            {
                _net = ConvolutionalNetwork.Load(BenchmarkSave.ModelPath);
            }
            _net.InitializeTraining(LearningParams);
            // Experience replay.
            LoadExperienceDatabase();
        }
Example no. 12
 public IConvolutionalExecution CreateConvolutional(ConvolutionalNetwork network)
 {
     return(new ConvolutionalExecution(_lap, network));
 }
Example no. 13
        public void ConvolutionSerialization()
        {
            int[] layers = new int[] {
                1024, //32x32 input image
                4704, //6 @ 28x28 convolution
                1176, //6 @  14x14 subsampling
                1600, //16 @ 10x10 convolution
                400,  //16 @ 5x5 subsampling
                120,  // 120x1x1 convolution!
                84,   //full
                10    //full
            };
            LayerConnector[] map = new LayerConnector[] {
                new ConvolutionAuto(5, 6, 1, 4),
                new SubSampling(6),
                new ConvolutionAuto(5, 16, 6, 4),
                new SubSampling(16),
                new ConvolutionAuto(5, 120, 16, 0),
                new FullLayerConnector(),
                new FullLayerConnector()
            };
            ConvolutionalTopology  topology  = new ConvolutionalTopology(layers, 1, map);
            ConvolutionalGenerator generator = new ConvolutionalGenerator();
            ConvolutionalNetwork   network   = generator.Create(topology);

            ConvolutionalXMLSerializer serializer = new ConvolutionalXMLSerializer();
            XDocument            doc      = serializer.Serialize(network);
            ConvolutionalNetwork network2 = serializer.Deserialize(doc);

            Assert.AreEqual(network2.Structure.Elements.Length, network.Structure.Elements.Length);
            Assert.AreEqual(network2.Structure.Elements[3000].GetDescription(), network.Structure.Elements[3000].GetDescription());
            Assert.AreEqual(network2.Structure.Elements[0].Next[0].Weight.Value, network.Structure.Elements[0].Next[0].Weight.Value);
            Assert.AreEqual(((NeuronBase)network2.Structure.Elements[4000]).Previous[0].Weight.Value, ((NeuronBase)network.Structure.Elements[4000]).Previous[0].Weight.Value);
            Assert.AreEqual(((NeuronBase)network2.Structure.Elements[3004]).Func.GetType(), ((NeuronBase)network.Structure.Elements[3004]).Func.GetType());

            Weight w1 = network2.Structure.Layers[0][0].Next[0].Weight;
            Weight w2 = network2.Structure.Layers[0][1].Next.Select(x => x.Weight).FirstOrDefault(x => x == w1);

            Assert.IsNotNull(w1);
            Assert.IsTrue(w1 == w2);

            Link[] l1 = network2.Structure.Layers[1];

            int c = GetConnectionsCount(l1, false, true);
            int w = GetWeightsCount(l1, false, true);

            Assert.AreEqual(c, 122304); // 6 maps x 28x28 neurons x (25 kernel weights + 1 bias) connections
            Assert.AreEqual(w, 156);    // 6 maps x (25 shared kernel weights + 1 bias) distinct weights

            for (int i = 0; i < network.Structure.Layers.Length; i++)
            {
                for (int j = 0; j < network.Structure.Layers[i].Length; j++)
                {
                    Connection[] n1 = network.Structure.Layers[i][j].Next;
                    if (n1 != null)
                    {
                        Connection[] n2 = network2.Structure.Layers[i][j].Next;
                        Assert.IsNotNull(n2);
                        for (int z = 0; z < n1.Length; z++)
                        {
                            Assert.AreEqual(n1[z].Weight.Value, n2[z].Weight.Value);
                            Assert.AreEqual(n1[z].Weight.GetType(), n2[z].Weight.GetType());
                        }
                    }
                    if (network.Structure.Layers[i][j] is NeuronBase)
                    {
                        Connection[] p1 = ((NeuronBase)network.Structure.Layers[i][j]).Previous;
                        if (p1 != null)
                        {
                            Connection[] p2 = ((NeuronBase)network2.Structure.Layers[i][j]).Previous;
                            Assert.IsNotNull(p2);
                            for (int z = 0; z < p1.Length; z++)
                            {
                                Assert.AreEqual(p1[z].Weight.Value, p2[z].Weight.Value);
                                Assert.AreEqual(p1[z].Weight.GetType(), p2[z].Weight.GetType());
                            }
                        }
                    }
                }
            }
        }