public OutputLayer(int numIn, int numNeurons, ActivationFunctionType actFunc)
{
    actFTyp = actFunc;
    NumNeurons = numNeurons;
    X = new double[numIn];
    Error = new double[NumNeurons];
    Bias = new double[NumNeurons];
    W = new double[NumNeurons, numIn];
    Output = new double[NumNeurons];
    LocalGrad = new double[X.Length];

    // System.Random is not thread-safe: sharing one instance across the
    // Parallel.For below can corrupt its internal state and yield all-zero
    // values. Hand each iteration its own seeded instance instead, and keep
    // the cheap inner loop sequential rather than nesting Parallel.For.
    var seedSource = new Random();
    Parallel.For(0, NumNeurons, i =>
    {
        Random rand;
        lock (seedSource)
        {
            rand = new Random(seedSource.Next());
        }
        Bias[i] = rand.NextDouble();
        for (int j = 0; j < numIn; j++)
        {
            W[i, j] = rand.NextDouble();
        }
    });
}
public static Func<float, float> FromType(ActivationFunctionType activationFunctionType)
{
    switch (activationFunctionType)
    {
        case ActivationFunctionType.Identity:
            return Identity;
        case ActivationFunctionType.BinaryStep:
            return BinaryStep;
        case ActivationFunctionType.SoftStep:
            return SoftStep;
        case ActivationFunctionType.TanH:
            return TanH;
        case ActivationFunctionType.ArcTan:
            return ArcTan;
        case ActivationFunctionType.SoftSign:
            return SoftSign;
        default:
            throw new ArgumentOutOfRangeException(nameof(activationFunctionType), activationFunctionType, null);
    }
}
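// A minimal usage sketch for the factory above. The containing class name
// ActivationFunctions and the SoftSign member are assumptions taken from the
// cases of the switch; the input 0.5f is illustrative only.
Func<float, float> softSign = ActivationFunctions.FromType(ActivationFunctionType.SoftSign);
float y = softSign(0.5f); // x / (1 + |x|) = 0.333..., if SoftSign is the usual definition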
public ActivationFunction(ActivationFunctionType f_type)
{
    switch (f_type)
    {
        case ActivationFunctionType.LINEAR:
            calculate = Methods.calculate_linear;
            break;
        case ActivationFunctionType.THRESHOLD:
            calculate = Methods.calculate_threshold;
            break;
        case ActivationFunctionType.LOGISTIC:
            calculate = Methods.calculate_logistic;
            break;
        case ActivationFunctionType.HYPERBOLIC_TANGENT:
            calculate = Methods.calculate_hyperbolic_tangent;
            break;
        default:
            // Fail fast instead of leaving `calculate` null and throwing a
            // NullReferenceException at first use.
            throw new ArgumentOutOfRangeException(nameof(f_type), f_type, "Unsupported activation function type.");
    }
    type = f_type;
}
public K210SeparableConv2d(ReadOnlySpan<int> dimensions, Tensor<float> dwWeights, Tensor<float> pwWeights, Tensor<float> bias, K210PoolType poolType, ActivationFunctionType fusedActivationFunction)
{
    if (dimensions[2] < 4 || dimensions[3] < 4)
    {
        // The single-string ArgumentOutOfRangeException ctor treats its
        // argument as a parameter name, so pass the message explicitly.
        throw new ArgumentOutOfRangeException(nameof(dimensions), "Lower than 4x4 input is not supported in dwConv2d.");
    }

    PoolType = poolType;
    FusedActivationFunction = fusedActivationFunction;
    DwWeights = dwWeights;
    PwWeights = pwWeights;
    Bias = bias;

    var stride = GetStride();
    if (dimensions[2] / stride < 4 || dimensions[3] / stride < 4)
    {
        throw new ArgumentOutOfRangeException(nameof(dimensions), "Lower than 4x4 output is not supported in dwConv2d.");
    }

    Input = AddInput("input", dimensions);
    Output = AddOutput("output", new[] { dimensions[0], OutputChannels, dimensions[2] / stride, dimensions[3] / stride });
}
/// <summary>
/// Create a genetic NN with the given topology.
/// </summary>
/// <param name="topology">Topology</param>
/// <param name="populationCount">Population size</param>
/// <param name="activationFunctionType">Neuron activation function</param>
/// <param name="initGenotype">Initial genotype (can be null)</param>
public GeneticNN(uint[] topology, int populationCount, ActivationFunctionType activationFunctionType = ActivationFunctionType.SoftSignFunction, float[] initGenotype = null)
{
    PopulationCount = populationCount;
    Topology = topology;
    ActivationFunctionType = activationFunctionType;
    NNs = new NeuralNetwork[populationCount];
    Evaluations = new float[populationCount];

    // Create the networks, initialised with random values.
    for (var i = 0; i < populationCount; i++)
    {
        NNs[i] = new NeuralNetwork(activationFunctionType, topology);
        if (initGenotype != null)
        {
            NNs[i].SetWeights(initGenotype);                      // init from the supplied genotype
        }
        else
        {
            NNs[i].SetRandomWeights(InitParamMin, InitParamMax);  // init with random weights
        }
    }

    // Build the next generation based on the supplied weights.
    if (initGenotype != null)
    {
        for (var i = 0; i < populationCount; i++)
        {
            Evaluations[i] = 1;
        }
        BuildNextGeneration();
    }
}
public Filter1D(Layer1D[] previousLayers, int filterSize, ActivationFunctionType activationFunctionType, InitialisationFunctionType initialisationFunctionType)
    : base(filterSize, previousLayers, activationFunctionType, initialisationFunctionType)
{
    // One shared weight array per previous layer: every output node reuses
    // the same filterSize weights, which is what makes this a convolution.
    var filterWeightMap = new Dictionary<Layer, Weight[]>();
    foreach (var prevLayer in previousLayers)
    {
        var filterWeights = new Weight[filterSize];
        for (var i = 0; i < filterSize; i++)
        {
            filterWeights[i] = new Weight(0);
        }
        filterWeightMap.Add(prevLayer, filterWeights);
    }

    var prevLayerNodesLength = previousLayers[0].Nodes.Count;
    var nodes = new List<Node>();
    // Slide the filter across the previous layer; each output node sees
    // filterSize consecutive input nodes ("valid" convolution, no padding).
    for (var i = 0; i < prevLayerNodesLength - filterSize + 1; i++)
    {
        var node = new Node();
        for (var j = 0; j < filterSize; j++)
        {
            var nodePosition = i + j;
            foreach (var prevLayer in previousLayers)
            {
                node.Weights.Add(prevLayer.Nodes[nodePosition], filterWeightMap[prevLayer][j]);
            }
        }
        nodes.Add(node);
    }

    Nodes = nodes.ToArray();
}
double[][] weights; // weight matrix

public NeuralLayer(int inputsSize, int outputsSize, NeuralLayerType type, ActivationFunctionType activationType = ActivationFunctionType.sigmoid)
{
    this.inputsSize = inputsSize;
    this.outputsSize = outputsSize;
    this.type = type;
    this.activationType = activationType;

    Random random = new Random();
    inputs = new double[inputsSize];
    weights = new double[outputsSize][];
    for (int i = 0; i < outputsSize; i++)
    {
        weights[i] = new double[inputsSize];
        for (int j = 0; j < inputsSize; j++)
        {
            if (type == NeuralLayerType.input)
            {
                // The input layer passes values through unchanged: identity weights.
                weights[i][j] = i == j ? 1 : 0;
            }
            else
            {
                weights[i][j] = random.NextDouble();
            }
        }
    }
}
/// <summary>
/// A neuron converted from an image map.
/// </summary>
/// <param name="inputs">Input values.</param>
/// <param name="weights">Weights.</param>
/// <param name="type">Activation function type (sigmoid by default).</param>
public NeuronFromMap(List<double> inputs, List<double> weights, ActivationFunctionType type = ActivationFunctionType.Sigmoid)
    : base(inputs, weights, type)
{
    Inputs = inputs;
    Weights = weights;
    ActivationFinctionType = type;
}
public static (ActivationFunction, ActivationFunction) GetActivations(ActivationFunctionType type)
{
    switch (type)
    {
        case ActivationFunctionType.Sigmoid:
            return (ActivationFunctions.Sigmoid, ActivationFunctions.SigmoidPrime);
        case ActivationFunctionType.Tanh:
            return (ActivationFunctions.Tanh, ActivationFunctions.TanhPrime);
        case ActivationFunctionType.LeCunTanh:
            return (ActivationFunctions.LeCunTanh, ActivationFunctions.LeCunTanhPrime);
        case ActivationFunctionType.ReLU:
            return (ActivationFunctions.ReLU, ActivationFunctions.ReLUPrime);
        case ActivationFunctionType.LeakyReLU:
            return (ActivationFunctions.LeakyReLU, ActivationFunctions.LeakyReLUPrime);
        case ActivationFunctionType.AbsoluteReLU:
            return (ActivationFunctions.AbsoluteReLU, ActivationFunctions.AbsoluteReLUPrime);
        case ActivationFunctionType.Softmax:
            // Softmax is differentiated jointly with the loss, so there is
            // no pointwise derivative to return here.
            return (ActivationFunctions.Softmax, null);
        case ActivationFunctionType.Softplus:
            // The derivative of softplus is the sigmoid itself.
            return (ActivationFunctions.Softplus, ActivationFunctions.Sigmoid);
        case ActivationFunctionType.ELU:
            return (ActivationFunctions.ELU, ActivationFunctions.ELUPrime);
        case ActivationFunctionType.Identity:
            return (ActivationFunctions.Identity, ActivationFunctions.IdentityPrime);
        default:
            throw new ArgumentOutOfRangeException(nameof(type), "Unsupported activation function");
    }
}
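// Hedged usage sketch: deconstructing the (function, derivative) pair above.
// ActivationFunction is assumed to be a double -> double delegate, and
// SigmoidPrime to take the pre-activation value; both are assumptions.
var (sigmoid, sigmoidPrime) = GetActivations(ActivationFunctionType.Sigmoid);
double y = sigmoid(0.0);       // 0.5
double dy = sigmoidPrime(0.0); // 0.25 under those assumptions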
public InternalNodeGene ChangeFunction(ActivationFunctionType newType, int geneID)
{
    ActivationFunction function = ActivationFunction.GetRandomInitializedFunction(newType);
    InternalNodeGene gene = new InternalNodeGene(geneID, nodeID, function);
    return gene;
}
public Perceptron(ActivationFunctionType type, int[] neuronsPerLayer, bool withSoftmax, double epsilon, double alpha)
{
    this.neuronsPerLayer = neuronsPerLayer;
    WithSoftmax = withSoftmax;
    ActivationType = type;
    Epsilon = epsilon;
    Alpha = alpha;
    SetActivationFunc(ActivationType);

    L = new List<Neuron[]>(neuronsPerLayer.Length);
    for (int l = 0; l < neuronsPerLayer.Length; l++)
    {
        bool isOutputLayer = l == neuronsPerLayer.Length - 1;
        L.Add(CreateLayer(neuronsPerLayer[l], isOutputLayer));
    }

    W = new List<double[,]>(L.Count);
    DeltaW = new List<double[,]>(L.Count);
    for (int l = 0; l < L.Count - 1; l++)
    {
        // Hidden layers carry an extra bias neuron that receives no incoming
        // weights, hence the -1 on every layer except the output.
        bool onOutput = l + 1 == L.Count - 1;
        W.Add(CreateWeights(L[l].Length, L[l + 1].Length - (onOutput ? 0 : 1)));
        DeltaW.Add(new double[neuronsPerLayer[l], neuronsPerLayer[l + 1]]);
    }
}
public static ActivationFunction GetRandomInitializedFunction(ActivationFunctionType type)
{
    switch (type)
    {
        case ActivationFunctionType.Sinusodial:
            return new SinusFunction();
        case ActivationFunctionType.Gaussian:
            return new GaussianFunction();
        case ActivationFunctionType.AbsoluteValue:
            return new AbsoluteValueFunction();
        case ActivationFunctionType.PyramidAbsoluteValue:
            return new PyramidAbsoluteValueFunction();
        case ActivationFunctionType.Modulo:
            return new ModuloFunction();
        case ActivationFunctionType.Linear:
            return new LinearFunction();
        case ActivationFunctionType.Sigmoid:
            return new SigmoidFunction();
        case ActivationFunctionType.Sensor:
            // Sensor nodes have no activation function.
            return null;
        default:
            return null;
    }
}
internal override LayerVertex Deserialize(IReadOnlyDictionary<string, object> properties)
{
    base.Deserialize(properties);
    // If parsing fails, `type` is left at the enum's default value.
    Enum.TryParse(properties[nameof(Kind)].ToString(), out ActivationFunctionType type);
    ActivationFunction = type;
    return this;
}
public Neuron(ActivationFunctionType type = ActivationFunctionType.None)
{
    this.input = 0;
    this.outpt = 0;
    this.connectionInputs = new List<Connection>();
    this.connectionOutputs = new List<Connection>();
    this.SetActivationFunction(type);
}
public Network(string trainingSetFile, List<int> networkStructure, ActivationFunctionType activationFunctionType, bool bias)
{
    fType = activationFunctionType;
    LoadTrainData(trainingSetFile);
    CreateNetwork(networkStructure, bias);
}
public static ActivationLayerVertex ActivationLayer(string name, ActivationFunctionType activationType = ActivationFunctionType.ReLU)
{
    return new ActivationLayerVertex(name)
    {
        ActivationFunction = activationType
    };
}
public Neuron(Range sensitivity, ActivationFunctionType f_type)
{
    this.sensitivity = sensitivity;
    activationFunc = new ActivationFunction(f_type);
    synapses = new List<Synapse>();
    axon = new Axon();
}
/// <summary>
/// A function that returns an activation function.
/// </summary>
/// <param name="type">Type of activation function.</param>
/// <returns>The selected function.</returns>
private static Func<double, double> GetFunction(ActivationFunctionType type)
{
    return type switch
    {
        ActivationFunctionType.Linear => (i => i),
        ActivationFunctionType.Sigmoid => (i => 1 / (1 + Math.Exp(-i))),
        ActivationFunctionType.TanH => (i => Math.Tanh(i)),
        ActivationFunctionType.ReLU => (i => Math.Max(0, i)),
        ActivationFunctionType.LReLU => (i => Math.Max(i * 0.01, i)),
        // Without a discard arm an unknown value throws SwitchExpressionException.
        _ => throw new ArgumentOutOfRangeException(nameof(type), type, "Unsupported activation function type.")
    };
}
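// Hedged sketch of how the selector above behaves. GetFunction is private,
// so this would live inside the same class; the inputs are illustrative.
var lrelu = GetFunction(ActivationFunctionType.LReLU);
Console.WriteLine(lrelu(2.0));  // 2:     max(0.02, 2)
Console.WriteLine(lrelu(-2.0)); // -0.02: max(-0.02, -2)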
/// <summary>
/// Add a layer.
/// </summary>
/// <param name="amountNeurons">Number of neurons in the layer.</param>
/// <param name="type">Type of activation function for the layer's neurons.</param>
public void AddLayer(int amountNeurons, ActivationFunctionType type)
{
    this.Layers.Add(new Layer(amountNeurons, type));
    if (this.Layers.Count > 1)
    {
        int lastLayer = this.Layers.Count - 1;
        this.ConnectLayers(this.Layers[lastLayer - 1], this.Layers[lastLayer]);
    }
}
/**
 * Add a named node to this genotype.
 */
public void AddNamedNode(string name, NodeType nodeType, ActivationFunctionType activationFuncName)
{
    if (NodeNameMap.ContainsKey(name))
    {
        throw new ArgumentException(string.Format("{0} is already a named node.", name));
    }
    this.NodeGenes.Add(new NodeGene(NodeIdStart, nodeType, activationFuncName));
    this.NodeNameMap[name] = NodeIdStart;
    NodeIdStart += 1;
}
public Perceptron(int numIn, ActivationFunctionType type)
{
    X = new double[numIn];
    W = new double[numIn];
    Bias = rand.NextDouble();
    for (int i = 0; i < W.Length; i++)
    {
        W[i] = rand.NextDouble();
    }
    FunctionType = type;
}
public static Filter1D[] Add1DConvolutionalLayer(this Layer1D[] inputs, int filterCount, int filterSize, ActivationFunctionType activationFunction, InitialisationFunctionType initialisationFunction)
{
    var filters = new Filter1D[filterCount];
    for (var i = 0; i < filterCount; i++)
    {
        filters[i] = new Filter1D(inputs, filterSize, activationFunction, initialisationFunction);
    }
    return filters;
}
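// Hedged usage sketch for the extension method above. BuildInputLayers is a
// hypothetical helper, and the filter count/size are illustrative; the enum
// values RELU and HeEtAl are taken from the Layer constructor defaults below.
Layer1D[] inputs = BuildInputLayers();
Filter1D[] conv = inputs.Add1DConvolutionalLayer(8, 3, ActivationFunctionType.RELU, InitialisationFunctionType.HeEtAl);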
// Main constructor
public NeuralNetwork(int inputNeuronCount, int outputNeuronCount, ActivationFunctionType activationFunctionType, params int[] hiddenNeuronCounts)
{
    // Check input validity
    if (inputNeuronCount <= 0)
    {
        throw new ArgumentException("Input neuron count must be positive, non-zero");
    }
    if (outputNeuronCount <= 0)
    {
        throw new ArgumentException("Output neuron count must be positive, non-zero");
    }
    if (hiddenNeuronCounts.Length == 0 || hiddenNeuronCounts.Contains(0))
    {
        throw new ArgumentException("hiddenNeuronCounts must contain at least one layer and no zero-length layers");
    }
    ActivationFunctionType = activationFunctionType;

    // Input ➡ First Hidden
    connections.Add(0, new DoubleMatrix(hiddenNeuronCounts[0], inputNeuronCount, MatrixInitMode.RanNorm));
    biases.Add(0, new DoubleMatrix(hiddenNeuronCounts[0], 1, MatrixInitMode.RanNorm));

    int i;
    for (i = 1; i < hiddenNeuronCounts.Length; i++)
    {
        // (i-1)'th ➡ i'th hidden layer connection matrix
        connections.Add(i, new DoubleMatrix(hiddenNeuronCounts[i], hiddenNeuronCounts[i - 1], MatrixInitMode.RanNorm));
        // i'th layer bias
        biases.Add(i, new DoubleMatrix(hiddenNeuronCounts[i], 1, MatrixInitMode.RanNorm));
    }

    // Last hidden ➡ output layer connection
    connections.Add(i, new DoubleMatrix(outputNeuronCount, hiddenNeuronCounts[i - 1], MatrixInitMode.RanNorm));
    // Output bias
    outputBias = new DoubleMatrix(outputNeuronCount, 1, MatrixInitMode.RanNorm);

    // Set activation function
    switch (activationFunctionType)
    {
        case ActivationFunctionType.Sigmoid:
            activation = SigmoidActivationFunction;
            activationD = SigmoidActivationFunctionDerivative;
            break;
        case ActivationFunctionType.HyperbolicTangent:
            activation = HyperbolicTangentActivationFunction;
            activationD = HyperbolicTangentActivationFunctionDerivative;
            break;
        default:
            // Fail fast instead of leaving the delegates null.
            throw new ArgumentException("Unsupported activation function type");
    }

    LearningRate = 0.1;
}
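// Hedged usage sketch for the constructor above: a 2-input, 1-output network
// with hidden layers of 16 and 8 neurons supplied through the params array.
var net = new NeuralNetwork(2, 1, ActivationFunctionType.Sigmoid, 16, 8);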
public Layer(
    Node[] nodes,
    Layer[] previousLayers,
    ActivationFunctionType activationFunctionType = ActivationFunctionType.RELU,
    InitialisationFunctionType initialisationFunctionType = InitialisationFunctionType.HeEtAl)
{
    Nodes = nodes;
    PreviousLayers = previousLayers;
    ActivationFunctionType = activationFunctionType;
    InitialisationFunctionType = initialisationFunctionType;
}
public FullyConnected(ReadOnlySpan<int> dimensions, Tensor<float> weights, Tensor<float> bias, ActivationFunctionType fusedActivationFunction)
{
    FusedActivationFunction = fusedActivationFunction;
    Weights = weights;
    Bias = bias;

    Input = AddInput("input", dimensions);
    // Output shape: [batch, outputUnits], where outputUnits is the weights' first dimension.
    Output = AddOutput("output", new[] { dimensions[0], weights.Dimensions[0] });
}
public Layer(int amountNeurons, ActivationFunctionType functionType)
{
    if (amountNeurons < 1)
    {
        string error = String.Format("Too small a number of neurons: {0}; the number of neurons must be greater than 0.", amountNeurons);
        throw new ArgumentOutOfRangeException(nameof(amountNeurons), error);
    }
    for (int i = 0; i < amountNeurons; i++)
    {
        Neurons.Add(new Neuron(functionType));
    }
}
public static void CalculateNeuronOutput(NeuronForManipulation neuron, ActivationFunctionType activationFunction, double Alpha, IList<double> inputs)
{
    // Keep the weights aligned with the input order before summing.
    neuron.Weights = neuron.Weights.OrderBy(a => a.Index).ToList();
    double outputSum = 0;
    for (int i = 0; i < neuron.Weights.Count; i++)
    {
        outputSum += inputs[i] * neuron.Weights[i].Weight;
    }
    outputSum += neuron.Bias;
    neuron.Output = NeuronActiveFunctionResult(activationFunction, Alpha, outputSum);
}
// Ro'(Z)
public static double ActivationFunctionDerivative(double x, ActivationFunctionType functionType)
{
    switch (functionType)
    {
        case ActivationFunctionType.Sigmoid:
            // s'(x) = s(x) * (1 - s(x)); equivalently e^-x / (1 + e^-x)^2
            return ActivationFunction(x, ActivationFunctionType.Sigmoid) * (1 - ActivationFunction(x, ActivationFunctionType.Sigmoid));
        case ActivationFunctionType.ReLU:
            return x > 0 ? 1 : 0;
    }
    throw new ArgumentException("Not supported function type");
}
/// <summary>
/// Derivative of the activation function.
/// </summary>
/// <param name="neuronOutput">The neuron's output.</param>
/// <param name="type">Activation function type.</param>
/// <returns>The derivative of the activation function, evaluated from the neuron's output.</returns>
internal static double DerivativeActivationFunction(double neuronOutput, ActivationFunctionType type)
{
    switch (type)
    {
        case ActivationFunctionType.Sigmoid:
            return (1 - neuronOutput) * neuronOutput;
        case ActivationFunctionType.HyperTan:
            return 1 - Math.Pow(neuronOutput, 2);
        default:
            throw new Exception("Unknown activation function type!");
    }
}
/// <summary>
/// Computes the value of the activation function.
/// </summary>
/// <param name="type">Activation function type.</param>
/// <param name="summary">Sum of the inputs.</param>
/// <returns>The value of the activation function.</returns>
internal static double ActivationFunction(ActivationFunctionType type, double summary)
{
    switch (type)
    {
        case ActivationFunctionType.Sigmoid:
            return Math.Pow(1 + Math.Exp(-summary), -1);
        case ActivationFunctionType.HyperTan:
            return (Math.Exp(2 * summary) - 1) / (Math.Exp(2 * summary) + 1);
        default:
            throw new Exception("Unknown activation function type!");
    }
}
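// Hedged sketch of how the two helpers above compose during backpropagation:
// the derivative is evaluated on the neuron's *output*, relying on the
// sigmoid identity s'(z) = s(z) * (1 - s(z)). The input 0.7 is illustrative.
double z = 0.7;
double output = ActivationFunction(ActivationFunctionType.Sigmoid, z);
double slope = DerivativeActivationFunction(output, ActivationFunctionType.Sigmoid);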
public Layer(int nodeCount, Layer[] previousGroups, ActivationFunctionType activationFunctionType, InitialisationFunctionType initialisationFunctionType, bool addBiasWeights = true)
{
    ActivationFunctionType = activationFunctionType;
    InitialisationFunctionType = initialisationFunctionType;
    PreviousLayers = previousGroups;

    var nodes = new Node[nodeCount];
    for (var i = 0; i < nodeCount; i++)
    {
        nodes[i] = new Node(previousGroups, addBiasWeights);
    }
    Nodes = nodes;
}
public MLPNetwork(int layersCount, int neuronsCount, bool bias, ActivationFunctionType aft, ProblemType problemType, string inputFileName)
{
    this.layersCount = layersCount;
    this.neuronsCount = neuronsCount;
    this.bias = bias;
    this.activationFunType = aft;
    this.problemType = problemType;
    LoadTrainingData(inputFileName);

    network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, bias, trainingData.InputSize));
    for (int i = 0; i < layersCount; i++)
    {
        network.AddLayer(new BasicLayer(CreateActivationFunction(), bias, neuronsCount));
    }
    network.AddLayer(new BasicLayer(CreateActivationFunction(), false, outputSize));
    network.Structure.FinalizeStructure();
    network.Reset();
}
public RegressionNetwork(string trainingSetFile, List<int> networkStructure, ActivationFunctionType activationFunctionType, bool bias)
    : base(trainingSetFile, networkStructure, activationFunctionType, bias)
{
}