示例#1
0
        public void CloneTest()
        {
            // A clone must serialize to exactly the same JSON as the layer it was cloned from.
            ReLULayer original = new ReLULayer(new Shape(new int[] { 2 }));
            ReLULayer clone = original.Clone() as ReLULayer;

            string expectedJson = JsonConvert.SerializeObject(original);
            string actualJson = JsonConvert.SerializeObject(clone);
            Assert.AreEqual(expectedJson, actualJson);
        }
示例#2
0
        public void ForwardBackwardTest()
        {
            // Verifies the ReLU forward pass and the back-propagated input gradient
            // on a small 2-element tensor containing one positive and one negative value.
            Shape shape = new Shape(new int[] { 2 });
            ReLULayer layer = new ReLULayer(shape);
            Session session = new Session();

            Tensor input = new Tensor(null, shape);
            input.Set(new float[] { 2, -3 });

            Tensor x = input.Clone() as Tensor;
            Tensor y = layer.Forward(session, new[] { x })[0];

            // Forward: each output element is activation(input element).
            float[] expected = input.Weights
                .Take(input.Length)
                .Select(w => ReLULayerTest.activation(w))
                .ToArray();
            Helpers.AreArraysEqual(x.Length, expected, y.Weights);

            // Backward: seed the output gradient with 1..N, then unroll the graph.
            float[] dy = Enumerable.Range(1, x.Length).Select(w => (float)w).ToArray();
            y.SetGradient(dy);
            session.Unroll();

            // Expected input gradient: derivative(output) * upstream gradient, element-wise.
            float[] expectedGradient = expected
                .Zip(dy, (w, dw) => ReLULayerTest.derivative(w) * dw)
                .ToArray();
            Helpers.AreArraysEqual(expected.Length, expectedGradient, x.Gradient);
        }
示例#3
0
        public void CopyConstructorTest1()
        {
            // A copy-constructed layer must serialize identically to its source layer.
            ReLULayer source = new ReLULayer(new Shape(new int[] { 2 }));
            ReLULayer copy = new ReLULayer(source);

            string expectedJson = JsonConvert.SerializeObject(source);
            string actualJson = JsonConvert.SerializeObject(copy);
            Assert.AreEqual(expectedJson, actualJson);
        }
示例#4
0
        public void ArchitectureConstructorTest1()
        {
            // Constructing from the valid "RELU" architecture string must preserve
            // both the architecture name and the requested output shape.
            Shape shape = new Shape(new int[] { 2 });
            ReLULayer layer = new ReLULayer(shape, "RELU", null);

            Assert.AreEqual("RELU", layer.Architecture);
            CollectionAssert.AreEqual(shape.Axes, layer.OutputShape.Axes);
        }
示例#5
0
        public void SerializeTest()
        {
            // Round-trip check: serialize -> deserialize -> serialize must be lossless,
            // i.e. both serializations produce the same JSON string.
            ReLULayer original = new ReLULayer(new Shape(new int[] { 2 }));
            string json = JsonConvert.SerializeObject(original);

            ReLULayer restored = JsonConvert.DeserializeObject<ReLULayer>(json);

            Assert.AreEqual(json, JsonConvert.SerializeObject(restored));
        }
示例#6
0
        /// <summary>
        /// Builds a two-layer fully-connected network:
        /// affine -> ReLU -> affine -> softmax.
        /// </summary>
        /// <param name="inputSize">Number of input features.</param>
        /// <param name="hiddenSize">Width of the single hidden layer.</param>
        /// <param name="outputSize">Number of output classes.</param>
        public MNISTNetwork(int inputSize, int hiddenSize, int outputSize) : base()
        {
            this.inputSize  = inputSize;
            this.hiddenSize = hiddenSize;
            this.outputSize = outputSize;

            // Layer stack: input -> affine1 -> ReLU -> affine2 -> softmax output.
            affine1 = new AffineLayer(inputSize, hiddenSize);
            relu    = new ReLULayer();
            affine2 = new AffineLayer(hiddenSize, outputSize);
            softmax = new SoftmaxLayer();

            // Momentum coefficient 0.9; learning rate is presumably supplied
            // elsewhere by the optimizer or training loop — not visible here.
            optimizer = new MomentumOptimizer(0.9f);
        }
示例#7
0
        public void ArchitectureConstructorTest2()
        {
            // An invalid architecture string ("REL") must be rejected with an
            // ArgumentException carrying the E_InvalidLayerArchitecture message.
            // The exception is rethrown so the framework's expected-exception
            // handling (attribute not visible in this chunk) can observe it.
            string architecture = "REL";

            try
            {
                ReLULayer layer = new ReLULayer(new Shape(new int[] { 2 }), architecture, null);

                // Fix: previously, if the constructor failed to throw, the test
                // silently passed. Assert.Fail throws a non-ArgumentException,
                // so it escapes the catch below and fails the test.
                Assert.Fail("Expected ArgumentException for invalid architecture.");
            }
            catch (ArgumentException e)
            {
                Assert.AreEqual(
                    new ArgumentException(string.Format(CultureInfo.InvariantCulture, Properties.Resources.E_InvalidLayerArchitecture, architecture), nameof(architecture)).Message,
                    e.Message);
                throw;
            }
        }
示例#8
0
        /// <summary>
        /// Builds a three-affine-layer network with batch normalization:
        /// (affine -> BN -> ReLU) x 2, then affine -> softmax.
        /// Each affine layer's weight scale is sqrt(2 / fanIn), matching the
        /// He-initialization formula commonly paired with ReLU activations.
        /// </summary>
        /// <param name="inputSize">Number of input features.</param>
        /// <param name="hiddenSize">Width of both hidden layers.</param>
        /// <param name="outputSize">Number of output classes.</param>
        public MNISTBatchNormalizationNetwork(int inputSize, int hiddenSize, int outputSize) : base()
        {
            this.inputSize  = inputSize;
            this.hiddenSize = hiddenSize;
            this.outputSize = outputSize;

            // First hidden block: affine -> batch-norm -> ReLU.
            // NOTE(review): BatchNormalizationLayer receives hiddenSize twice —
            // confirm against its constructor signature that this is intended.
            affine1 = new AffineLayer(inputSize, hiddenSize, Mathf.Sqrt(2.0f / inputSize));
            bn1     = new BatchNormalizationLayer(hiddenSize, hiddenSize);
            relu1   = new ReLULayer();

            // Second hidden block, same shape as the first.
            affine2 = new AffineLayer(hiddenSize, hiddenSize, Mathf.Sqrt(2.0f / hiddenSize));
            bn2     = new BatchNormalizationLayer(hiddenSize, hiddenSize);
            relu2   = new ReLULayer();

            // Output projection followed by softmax.
            affine3 = new AffineLayer(hiddenSize, outputSize, Mathf.Sqrt(2.0f / hiddenSize));
            softmax = new SoftmaxLayer();

            // Momentum coefficient 0.9; learning rate not visible in this chunk.
            optimizer = new MomentumOptimizer(0.9f);
        }