/// <summary>
/// Initializes the builder with the layer's structural and training parameters.
/// </summary>
/// <param name="size">Number of neurons in the layer.</param>
/// <param name="connections">Number of incoming connections per neuron.</param>
/// <param name="lr">Learning rate used during weight updates.</param>
/// <param name="mo">Momentum coefficient.</param>
/// <param name="operation">Operation mode this layer is built for.</param>
/// <param name="activation">Activation function applied by the layer.</param>
/// <param name="optmizator">Optimizer selection. NOTE(review): "optmizator" spelling kept — it is the established public parameter/field name.</param>
public LayerBuilder(int size, int connections, float lr, float mo, ANNOperationsEnum operation, ActivationFunctionEnum activation, EnumOptimizerFunction optmizator)
{
    this.size = size;
    this.connections = connections;
    this.lr = lr;
    this.mo = mo;
    this.operation = operation;
    this.activation = activation;
    this.optmizator = optmizator;
}
/// <summary>
/// Builds a supervised hidden dense layer with sigmoid activation and full synapse wiring.
/// </summary>
/// <param name="connections">Number of incoming connections per neuron.</param>
/// <param name="size">Number of neurons in the layer.</param>
/// <param name="learningRate">Learning rate applied to this layer's updates.</param>
/// <param name="std">Standard deviation passed to synapse initialization.</param>
/// <param name="opt">Optimizer to attach to the layer.</param>
/// <returns>The constructed layer as an <see cref="INeuron"/>.</returns>
public static INeuron DenseSigmoid(int connections, int size, float learningRate, float std, EnumOptimizerFunction opt)
{
    var layer = new LayerCreator(size, connections)
        .WithLearningRate(learningRate)
        .Supervised_f()
        .Hidden_f()
        .Activation_f()
        .Sigmoid_f()
        .WithOpetimizator_f(opt)
        .FullSynapse(std);

    return layer.Build();
}
/// <summary>
/// Builds a supervised hidden recurrent layer with tanh activation and full synapse wiring.
/// </summary>
/// <param name="connections">Input width of the recurrent unit.</param>
/// <param name="size">Number of neurons (hidden width) of the unit.</param>
/// <param name="learningRate">Learning rate applied to this layer's updates.</param>
/// <param name="std">Standard deviation passed to synapse initialization.</param>
/// <param name="opt">Optimizer to attach to the layer.</param>
/// <returns>The constructed layer as an <see cref="IMultipleNeuron"/>.</returns>
public static IMultipleNeuron RecurrentTanh(int connections, int size, float learningRate, float std, EnumOptimizerFunction opt)
{
    // NOTE(review): LayerCreator gets 0 connections here; the real widths are
    // supplied through MultipleActivator_f instead — presumably intentional
    // for multi-input recurrent layers, confirm against LayerCreator.
    var layer = new LayerCreator(size, 0)
        .WithLearningRate(learningRate)
        .Supervised_f()
        .Hidden_f()
        .MultipleActivator_f(new[] { connections, size })
        .TANH_f()
        .WithOpetimizator_f(opt)
        .FullSynapse(std);

    return layer.Build();
}
/// <summary>
/// Creates the four gate layers of an LSTM cell; each gate consumes the
/// concatenation of the input vector and the previous hidden state.
/// </summary>
/// <param name="inputSize">Width of the cell's input vector.</param>
/// <param name="hiddenSize">Width of the cell's hidden state.</param>
/// <param name="learningRate">Learning rate for all four gate layers.</param>
/// <param name="std">Standard deviation for synapse initialization.</param>
/// <param name="opt">Optimizer applied to every gate layer.</param>
public LSTMCellModel(int inputSize, int hiddenSize, float learningRate, float std, EnumOptimizerFunction opt)
{
    // All gates see [input ++ previous hidden state].
    int concatSize = inputSize + hiddenSize;

    // w[0], w[1], w[3] are sigmoid gates; w[2] is the tanh candidate layer.
    // NOTE(review): presumably forget/input/candidate/output ordering — confirm
    // against the cell's forward pass.
    w[0] = BuildedModels.DenseSigmoid(concatSize, hiddenSize, learningRate, std, opt);
    w[1] = BuildedModels.DenseSigmoid(concatSize, hiddenSize, learningRate, std, opt);
    w[2] = BuildedModels.DenseTanh(concatSize, hiddenSize, learningRate, std, opt);
    w[3] = BuildedModels.DenseSigmoid(concatSize, hiddenSize, learningRate, std, opt);
}
/// <summary>
/// Wires a recurrent encoder (stacked tanh units) into a softmax decoder head
/// trained with cross-entropy loss, and records the network dimensions.
/// </summary>
/// <param name="input">Width of the raw input vector.</param>
/// <param name="output">Number of output classes.</param>
/// <param name="hidden">Hidden width of each recurrent unit.</param>
/// <param name="units">Number of stacked recurrent units in the encoder.</param>
/// <param name="learningRate">Learning rate shared by all layers.</param>
/// <param name="std">Standard deviation for synapse initialization.</param>
/// <param name="opt">Optimizer applied to every layer.</param>
public ClassifierRecurrentNeuralNetwork(int input, int output, int hidden, int units, float learningRate, float std, EnumOptimizerFunction opt)
{
    encoder = new RecurrentCellModel();
    decoder = new DenseModel();

    // The first unit reads the raw input width; every subsequent unit reads
    // the hidden width produced by the unit below it.
    int layerInput = input;
    for (int unit = 0; unit < units; unit++)
    {
        encoder.AddLayer(BuildedModels.RecurrentTanh(layerInput, hidden, learningRate, std, opt));
        layerInput = hidden;
    }

    // Classification head: softmax output paired with cross-entropy loss.
    decoder.AddLayer(BuildedModels.DenseSoftMax(hidden, output, learningRate, std, opt));
    decoder.SetLossFunction(new CrossEntropyLossFunction());

    input_size = input;
    hidden_size = hidden;
    output_size = output;
    recurrentUnits = units;
    learning_rate = learningRate;
}
/// <summary>
/// Selects the optimizer and transitions the builder to its multiple-layer variant,
/// carrying over every previously configured parameter.
/// </summary>
/// <param name="opt">Optimizer to use for the layer being built.</param>
/// <returns>A <see cref="LayerBuilderMultiple"/> preloaded with this builder's state.</returns>
// NOTE(review): "Opetimizator" misspelling kept — it is the public API name used by existing callers.
public LayerBuilderMultiple WithOpetimizator_f(EnumOptimizerFunction opt) =>
    new LayerBuilderMultiple(size, connections, lr, mo, operation, activation, opt);