Example #1
 public LayerBuilder(int size, int connections, float lr, float mo, ANNOperationsEnum operation, ActivationFunctionEnum activation, EnumOptimizerFunction optmizator)
 {
     this.size        = size;
     this.connections = connections;
     this.lr          = lr;
     this.mo          = mo;
     this.operation   = operation;
     this.activation  = activation;
     this.optmizator  = optmizator;
 }
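This constructor only records the layer's hyperparameters (neuron count, number of incoming connections, learning rate lr, momentum mo, operation type, activation and optimizer); the fluent *_f steps shown in the later examples then assemble a layer from this stored state.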
Example #2
 public static INeuron DenseSigmoid(int connections, int size, float learningRate, float std, EnumOptimizerFunction opt)
 {
     return(new LayerCreator(size, connections)
            .WithLearningRate(learningRate)
            .Supervised_f()
            .Hidden_f()
            .Activation_f()
            .Sigmoid_f()
            .WithOpetimizator_f(opt)
            .FullSynapse(std)
            .Build());
 }
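DenseSigmoid chains the builder steps into a fully connected, sigmoid-activated layer, with std presumably controlling the spread of the initial weights handed to FullSynapse. As a library-independent sketch (not the library's actual implementation), the forward pass such a layer computes is roughly:

 // Minimal sketch, assuming the usual dense-layer math: y = sigmoid(W * x + b).
 static class DenseSigmoidSketch
 {
     public static float[] Forward(float[][] w, float[] b, float[] x)
     {
         int size = w.Length;                 // number of neurons in the layer
         var y = new float[size];
         for (int i = 0; i < size; i++)
         {
             float sum = b[i];
             for (int j = 0; j < x.Length; j++)
                 sum += w[i][j] * x[j];       // weighted sum over the incoming connections
             y[i] = 1f / (1f + (float)System.Math.Exp(-sum));   // sigmoid activation
         }
         return y;
     }
 }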
Example #3
 public static IMultipleNeuron RecurrentTanh(int connections, int size, float learningRate, float std, EnumOptimizerFunction opt)
 {
     return(new LayerCreator(size, 0)
            .WithLearningRate(learningRate)
            .Supervised_f()
            .Hidden_f()
            .MultipleActivator_f(new[] { connections, size })
            .TANH_f()
            .WithOpetimizator_f(opt)
            .FullSynapse(std)
            .Build());
 }
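The array passed to MultipleActivator_f suggests the recurrent cell consumes two vectors per step: the current input (connections values) and the previous hidden state (size values). A minimal, library-independent sketch of the standard tanh recurrence this corresponds to (the layer's actual internals may differ):

 // Minimal sketch of an Elman-style step: h = tanh(Wx * x + Wh * hPrev + b).
 static class RecurrentTanhSketch
 {
     public static float[] Step(float[][] wx, float[][] wh, float[] b,
                                float[] x, float[] hPrev)
     {
         int size = b.Length;
         var h = new float[size];
         for (int i = 0; i < size; i++)
         {
             float sum = b[i];
             for (int j = 0; j < x.Length; j++)     sum += wx[i][j] * x[j];      // input contribution
             for (int j = 0; j < hPrev.Length; j++) sum += wh[i][j] * hPrev[j];  // recurrent contribution
             h[i] = (float)System.Math.Tanh(sum);
         }
         return h;
     }
 }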
Example #4
 public LSTMCellModel(int inputSize, int hiddenSize, float learningRate, float std, EnumOptimizerFunction opt)
 {
     w[0] = BuildedModels.DenseSigmoid(inputSize + hiddenSize, hiddenSize, learningRate, std, opt);
     w[1] = BuildedModels.DenseSigmoid(inputSize + hiddenSize, hiddenSize, learningRate, std, opt);
     w[2] = BuildedModels.DenseTanh(inputSize + hiddenSize, hiddenSize, learningRate, std, opt);
     w[3] = BuildedModels.DenseSigmoid(inputSize + hiddenSize, hiddenSize, learningRate, std, opt);
 }
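The cell is composed of four dense layers over the concatenated input and previous hidden state (inputSize + hiddenSize): three sigmoid layers and one tanh layer, which matches the standard LSTM gate layout (forget, input and output gates plus a tanh candidate). The sketch below shows how such gate outputs combine into the new cell and hidden state; the exact mapping of w[0]..w[3] to specific gates is an assumption, only "three sigmoids plus one tanh" is visible in the source.

 // Minimal sketch of the standard LSTM combination step, assuming
 // f, i, o come from the sigmoid layers and g from the tanh layer:
 //   c = f * cPrev + i * g      (element-wise gated memory update)
 //   h = o * tanh(c)            (element-wise gated exposure of memory)
 static class LstmStepSketch
 {
     public static (float[] c, float[] h) Combine(
         float[] f, float[] i, float[] g, float[] o, float[] cPrev)
     {
         var c = new float[cPrev.Length];
         var h = new float[cPrev.Length];
         for (int k = 0; k < cPrev.Length; k++)
         {
             c[k] = f[k] * cPrev[k] + i[k] * g[k];            // keep part of old memory, add new candidate
             h[k] = o[k] * (float)System.Math.Tanh(c[k]);     // output gate controls the hidden state
         }
         return (c, h);
     }
 }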
Example #5
 public ClassifierRecurrentNeuralNetwork(int input, int output, int hidden, int units, float learningRate, float std, EnumOptimizerFunction opt)
 {
     encoder = new RecurrentCellModel();
     decoder = new DenseModel();

     int x = input;

     for (int i = 0; i < units; i++)
     {
         encoder.AddLayer(BuildedModels.RecurrentTanh(x, hidden, learningRate, std, opt));
         x = hidden;
     }

     decoder.AddLayer(BuildedModels.DenseSoftMax(hidden, output, learningRate, std, opt));
     decoder.SetLossFunction(new CrossEntropyLossFunction());

     input_size     = input;
     hidden_size    = hidden;
     output_size    = output;
     recurrentUnits = units;
     learning_rate  = learningRate;
 }
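The constructor stacks units recurrent tanh cells as the encoder (the first taking input features, the rest taking hidden), then attaches a single dense softmax layer trained with cross-entropy loss as the decoder, i.e. a plain recurrent encoder followed by a classification head.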
Example #6
 public LayerBuilderMultiple WithOpetimizator_f(EnumOptimizerFunction opt)
 {
     return(new LayerBuilderMultiple(size, connections, lr, mo, operation, activation, opt));
 }
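Each With…_f step returns a new builder carrying the configuration gathered so far, so this call is what fixes the optimizer before FullSynapse(std) and Build() in Examples #2 and #3. A generic sketch of that immutable-builder style, with purely illustrative names (not the library's):

 // Illustrative only: each With* step copies the accumulated state into a
 // fresh builder instead of mutating the current one.
 sealed class ExampleBuilder
 {
     public float LearningRate { get; }
     public string Optimizer   { get; }

     public ExampleBuilder(float learningRate, string optimizer)
     {
         LearningRate = learningRate;
         Optimizer    = optimizer;
     }

     public ExampleBuilder WithOptimizer(string optimizer)
         => new ExampleBuilder(LearningRate, optimizer);   // copy state, swap one field
 }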