Example #1
        // Maps the framework-agnostic initializer enum onto a CNTK initializer dictionary.
        private static CNTKDictionary GetInitializer(NeuralNetworkInitializer initializer, uint seed)
        {
            var scale = CNTKLib.DefaultParamInitScale;
            var rank  = CNTKLib.SentinelValueForInferParamInitRank;

            switch (initializer)
            {
            case NeuralNetworkInitializer.Zero: return(CNTKLib.ConstantInitializer(0.0f));

            case NeuralNetworkInitializer.GlorotNormal: return(CNTKLib.GlorotNormalInitializer(scale, rank, rank, seed));

            case NeuralNetworkInitializer.GlorotUniform: return(CNTKLib.GlorotUniformInitializer(scale, rank, rank, seed));

            case NeuralNetworkInitializer.HeNormal: return(CNTKLib.HeNormalInitializer(scale, rank, rank, seed));

            case NeuralNetworkInitializer.HeUniform: return(CNTKLib.HeUniformInitializer(scale, rank, rank, seed));

            case NeuralNetworkInitializer.Normal: return(CNTKLib.NormalInitializer(scale, rank, rank, seed));

            case NeuralNetworkInitializer.TruncateNormal: return(CNTKLib.TruncatedNormalInitializer(scale, seed));

            case NeuralNetworkInitializer.Uniform: return(CNTKLib.UniformInitializer(scale, seed));

            case NeuralNetworkInitializer.Xavier: return(CNTKLib.XavierInitializer(scale, rank, rank, seed));

            default:
                throw new InvalidOperationException("Unexpected initializer " + initializer);
            }
        }
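
A minimal usage sketch of the helper above; the 128x784 shape and CPU device are illustrative assumptions, not part of the original. The returned CNTKDictionary is passed straight to a CNTK Parameter constructor, as in Example #7:

        // Hypothetical usage: He-normal initialization for a 128x784 weight matrix.
        var init = GetInitializer(NeuralNetworkInitializer.HeNormal, seed: 1u);
        var W    = new Parameter(new int[] { 128, 784 }, DataType.Float, init,
                                 DeviceDescriptor.CPUDevice, "W");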
Example #2
    public static Tensor FullyConnectedLinearLayer(Tensor input, int outputDim, NeuralNetworkInitializer initializer, int seed)
    {
        int inputDim = (int)input.shape[1];

        IInitializer init = GetInitializer(initializer, seed);
        IVariableV1  W    = tf.compat.v1.get_variable("W", new int[] { inputDim, outputDim }, tf.float32, init);

        IVariableV1 b = tf.compat.v1.get_variable("b", new int[] { outputDim }, tf.float32, init);

        return(tf.matmul(input, W.AsTensor()) + b.AsTensor());
    }
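
A hedged usage sketch, assuming TensorFlow.NET graph mode; the placeholder shape and layer width are illustrative:

    // Hypothetical: a batch of 20-feature rows feeding a 10-unit linear layer.
    tf.compat.v1.disable_eager_execution();
    var x      = tf.placeholder(tf.float32, new Shape(-1, 20), name: "x");
    var logits = FullyConnectedLinearLayer(x, 10, NeuralNetworkInitializer.glorot_uniform_initializer, seed: 42);

Note that the "W" and "b" variable names collide if this helper is called twice in the same scope, which is why Example #6 wraps it in tf.variable_scope.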
Example #3
        protected override NeuralNetworkImage[] BrainInitializer()
        {
            var relu = new SoftReLU();
            var init = new NeuralNetworkInitializer()
                       .SetInputSize(20);

            for (int i = 1; i < LAYER_COUNT - 1; i++)
            {
                init.AddLayer(relu, LAYER_COUNT + 40 - (i + 1));
            }

            // Note: "Sigmoind" and "Errorest" are the type names as spelled by this library.
            init.AddLayer(new Sigmoind(), 40)
                .SetCorrection(new Errorest())
                .SetDataConvertor(new DataRange(128, -128), new DataRange(255, 0));

            return(new NeuralNetworkImage[] { (NeuralNetworkImage)init.Image() });
        }
Example #4
        protected override NeuralNetworkImage[] BrainInitializer()
        {
            var conduction = setting.Brain.Layers.Conduction;
            var layers     = setting.Brain.Layers.NodesCount;

            if (layers == null || layers.Length == 0)
            {
                setting.Brain.Layers.NodesCount = new int[0];
                throw new InvalidOperationException("The default layers' node count is not set.");
            }

            var image = new NeuralNetworkInitializer()
                        .SetInputSize(2)
                        .AddLayer(FunctionDecoder.Conduction(conduction), layers)
                        .AddLayer(new SoftMax(), 2)
                        .SetCorrection(new CrossEntropy())
                        .SetDataConvertor(new DataRange(SignalRange, SignalHeight), null)
                        .Image();

            return(new NeuralNetworkImage[] { (NeuralNetworkImage)image });
        }
Example #5
    // Maps the enum onto TensorFlow.NET's built-in initializers; the seed is
    // forwarded only where the underlying factory accepts one.
    private static IInitializer GetInitializer(NeuralNetworkInitializer initializer, int seed)
    {
        switch (initializer)
        {
        case NeuralNetworkInitializer.glorot_uniform_initializer: return(tf.glorot_uniform_initializer);

        case NeuralNetworkInitializer.ones_initializer: return(tf.ones_initializer);

        case NeuralNetworkInitializer.zeros_initializer: return(tf.zeros_initializer);

        case NeuralNetworkInitializer.random_uniform_initializer: return(tf.random_uniform_initializer);

        case NeuralNetworkInitializer.orthogonal_initializer: return(tf.orthogonal_initializer);

        case NeuralNetworkInitializer.random_normal_initializer: return(tf.random_normal_initializer(seed: seed));

        case NeuralNetworkInitializer.truncated_normal_initializer: return(tf.truncated_normal_initializer(seed: seed));

        case NeuralNetworkInitializer.variance_scaling_initializer: return(tf.variance_scaling_initializer(seed: seed));

        default:
            throw new InvalidOperationException("Unexpected initializer " + initializer);
        }
    }
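
The enum members mirror TensorFlow's own initializer names one-to-one, so usage is a direct lookup; a small sketch (the shape and seed are assumptions):

    // Hypothetical: seedable truncated-normal weights for a 784x128 layer.
    var init = GetInitializer(NeuralNetworkInitializer.truncated_normal_initializer, seed: 7);
    var W    = tf.compat.v1.get_variable("W", new int[] { 784, 128 }, tf.float32, init);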
Example #6
    public static Tensor DenseLayer(Tensor input, int outputDim, NeuralNetworkActivation activation, NeuralNetworkInitializer initializer, int seed, string name)
    {
        return(tf_with(tf.variable_scope(name), delegate
        {
            Tensor fullyConnected = FullyConnectedLinearLayer(input, outputDim, initializer, seed);
            switch (activation)
            {
            case NeuralNetworkActivation.None: return fullyConnected;

            case NeuralNetworkActivation.ReLU: return tf.nn.relu(fullyConnected, "ReLU");

            case NeuralNetworkActivation.Sigmoid: return tf.nn.sigmoid(fullyConnected, "Sigmoid");

            case NeuralNetworkActivation.Tanh: return tf.nn.tanh(fullyConnected, "Tanh");

            default: throw new InvalidOperationException("Unexpected activation " + activation);
            }
        }));
    }
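
Stacking DenseLayer builds a small MLP; a sketch under assumed dimensions. The per-layer tf.variable_scope keeps each layer's "W" and "b" names from colliding:

    // Hypothetical: two-layer classifier over 20 input features.
    var x      = tf.placeholder(tf.float32, new Shape(-1, 20), name: "x");
    var hidden = DenseLayer(x, 64, NeuralNetworkActivation.ReLU,
                            NeuralNetworkInitializer.glorot_uniform_initializer, 42, "hidden");
    var logits = DenseLayer(hidden, 10, NeuralNetworkActivation.None,
                            NeuralNetworkInitializer.glorot_uniform_initializer, 42, "logits");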
Example #7
        public static Function FullyConnectedLinearLayer(Variable input, int outputDim, NeuralNetworkInitializer initializer, int seed, DeviceDescriptor device)
        {
            System.Diagnostics.Debug.Assert(input.Shape.Rank == 1);
            int inputDim = input.Shape[0];

            var init = GetInitializer(initializer, (uint)seed);
            // CNTK stores dense weights as (outputDim x inputDim); W * input below is
            // a matrix-vector product via the overloaded Times operator.
            var W    = new Parameter(new int[] { outputDim, inputDim }, DataType.Float, init, device, "W");

            var b = new Parameter(new int[] { outputDim }, DataType.Float, init, device, "b");

            return(b + W * input);
        }
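
A brief sketch of wiring this into a CNTK graph; the 784-wide input is an assumption:

        // Hypothetical: project a 784-dimensional input down to 10 outputs on the CPU.
        var device = DeviceDescriptor.CPUDevice;
        var input  = Variable.InputVariable(new int[] { 784 }, DataType.Float, "features");
        var output = FullyConnectedLinearLayer(input, 10, NeuralNetworkInitializer.GlorotUniform, 1, device);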
Example #8
        public static Function DenseLayer(Variable input, int outputDim, DeviceDescriptor device, NeuralNetworkActivation activation, NeuralNetworkInitializer initializer, int seed, string name)
        {
            // Flatten higher-rank inputs (e.g. images) to a vector before the dense layer.
            if (input.Shape.Rank != 1)
            {
                int newDim = input.Shape.Dimensions.Aggregate((d1, d2) => d1 * d2);
                input = CNTKLib.Reshape(input, new int[] { newDim });
            }

            Function fullyConnected = FullyConnectedLinearLayer(input, outputDim, initializer, seed, device);

            fullyConnected.SetName(name);
            switch (activation)
            {
            case NeuralNetworkActivation.None: return(fullyConnected);

            case NeuralNetworkActivation.ReLU: return(CNTKLib.ReLU(fullyConnected, name + "ReLU"));

            case NeuralNetworkActivation.Sigmoid: return(CNTKLib.Sigmoid(fullyConnected, name + "Sigmoid"));

            case NeuralNetworkActivation.Tanh: return(CNTKLib.Tanh(fullyConnected, name + "Tanh"));

            default: throw new InvalidOperationException("Unexpected activation " + activation);
            }
        }
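
Finally, a sketch of composing DenseLayer into a small CNTK network; the 28x28 input and layer widths are illustrative, and the rank check above flattens the image automatically:

        // Hypothetical: 28x28 image -> 128 ReLU units -> 10 linear outputs.
        var device = DeviceDescriptor.CPUDevice;
        var image  = Variable.InputVariable(new int[] { 28, 28 }, DataType.Float, "image");
        var hidden = DenseLayer(image, 128, device, NeuralNetworkActivation.ReLU,
                                NeuralNetworkInitializer.HeNormal, 1, "hidden");
        var logits = DenseLayer(hidden, 10, device, NeuralNetworkActivation.None,
                                NeuralNetworkInitializer.GlorotUniform, 1, "output");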