/// <summary>
/// Takes a genetic snapshot of this axon: the concrete activation-function
/// type currently in use plus the present weight of every terminal.
/// </summary>
/// <returns>A freshly built <c>AxonGene</c> describing this axon.</returns>
public AxonGene GetGenes()
{
    var snapshot = new AxonGene
    {
        ActivationFunction = _activationFunction.GetType(),
        Weights = _terminals.Select(d => d.Weight).ToList()
    };
    return snapshot;
}
/// <summary>
/// Serializes an activation function as its type name followed by each of
/// its parameters, '|'-separated, each formatted with the Encog EG CSV
/// format at the framework's default precision.
/// </summary>
/// <param name="act">The activation function to serialize.</param>
/// <returns>The encoded string, e.g. "ActivationTANH|0.5".</returns>
private String MakeActivationFunctionString(IActivationFunction act)
{
    var encoded = new StringBuilder(act.GetType().Name);
    foreach (double param in act.Params)
    {
        encoded.Append('|');
        encoded.Append(CSVFormat.EgFormat.Format(param, EncogFramework.DefaultPrecision));
    }
    return encoded.ToString();
}
/// <summary>
/// Rebuilds the plotted activation function after the combo-box selection
/// changes. A new instance of the selected type is created only when the
/// selection is a valid Type and differs from the function already shown;
/// it is then bound to the property grid and the view, and re-plotted over
/// the fixed domain [-1, 1] with 50 sample steps.
/// </summary>
private void updateGraph()
{
    var selectedType = cbActivationFunction.SelectedItem as Type;
    if (selectedType == null)
    {
        return;
    }

    // Nothing to do when the selection matches the function on screen.
    if (function != null && selectedType == function.GetType())
    {
        return;
    }

    this.function = (IActivationFunction)Activator.CreateInstance(selectedType);
    this.propertyGrid.SelectedObject = function;
    this.activationFunctionView.Function = function;
    this.activationFunctionView.Domain = new DoubleRange(-1.0, 1.0);
    this.activationFunctionView.Steps = 50;
    this.activationFunctionView.Plot();
}
/// <summary>
/// Renders the current function — or its first/second derivative, per the
/// <c>derivative</c> setting — as a red anti-aliased line curve, sampled at
/// <c>plotSteps</c> evenly spaced points across the configured plot domain.
/// </summary>
/// <exception cref="InvalidOperationException">No function is assigned.</exception>
public void Plot()
{
    if (this.function == null)
    {
        throw new InvalidOperationException();
    }

    zedGraphControl.GraphPane.CurveList.Clear();

    double stepSize = plotDomain.Length / plotSteps;
    var samples = new PointPairList();

    for (int step = 0; step < plotSteps; step++)
    {
        double x = this.plotDomain.Min + ((double)step * stepSize);

        // Choose which curve to sample; any unrecognized setting falls
        // back to the plain function value, like the original default case.
        double y;
        if (this.derivative == FunctionDerivative.First)
        {
            y = this.function.Derivative(x);
        }
        else if (this.derivative == FunctionDerivative.Second)
        {
            y = this.function.Derivative2(x);
        }
        else
        {
            y = this.function.Function(x);
        }

        samples.Add(x, y);
    }

    LineItem curve = zedGraphControl.GraphPane.AddCurve(
        function.GetType().Name, samples, Color.Red, SymbolType.None);
    //curve.Line.IsSmooth = true;
    //curve.Line.SmoothTension = 0.1f;
    curve.Line.IsAntiAlias = true;
    curve.Line.Width = 2f;

    zedGraphControl.AxisChange();
    zedGraphControl.Invalidate();
}
/// <summary>
/// Write a property as an activation function. The function is encoded as
/// its type name followed by each parameter, '|'-separated, in Encog EG
/// CSV format at the framework's default precision, then written through
/// the string overload.
/// </summary>
/// <param name="name">The name of the property.</param>
/// <param name="act">The activation function.</param>
public void WriteProperty(String name, IActivationFunction act)
{
    var encoded = new StringBuilder(act.GetType().Name);
    for (int idx = 0; idx < act.Params.Length; idx++)
    {
        encoded.Append('|')
               .Append(CSVFormat.EgFormat.Format(act.Params[idx],
                                                 EncogFramework.DefaultPrecision));
    }
    WriteProperty(name, encoded.ToString());
}
/// <summary>
/// Save the activation function to XML: an outer activation tag, an inner
/// tag named after the function's type, and one attribute per parameter
/// (named from ParamNames, value EG-formatted to 10 digits). Does nothing
/// when the function is null.
/// </summary>
/// <param name="activationFunction">The activation function.</param>
/// <param name="xmlOut">The XML.</param>
public static void SaveActivationFunction(
    IActivationFunction activationFunction, WriteXML xmlOut)
{
    if (activationFunction == null)
    {
        return;
    }

    xmlOut.BeginTag(BasicLayerPersistor.TAG_ACTIVATION);
    xmlOut.BeginTag(activationFunction.GetType().Name);

    String[] paramNames = activationFunction.ParamNames;
    for (int idx = 0; idx < paramNames.Length; idx++)
    {
        double value = activationFunction.Params[idx];
        xmlOut.AddAttribute(paramNames[idx], "" + CSVFormat.EG_FORMAT.Format(value, 10));
    }

    xmlOut.EndTag();
    xmlOut.EndTag();
}
/// <summary>
/// Renders an array of activation functions as a single-line JavaScript
/// array literal of ENCOG factory calls, e.g.
/// "[ENCOG.ActivationSigmoid.create(),ENCOG.ActivationTANH.create()]".
/// </summary>
/// <param name="activationFunctions">The functions to render.</param>
/// <returns>The JavaScript array-literal string.</returns>
/// <exception cref="AnalystCodeGenerationError">
/// Thrown when a function's type has no JavaScript equivalent.
/// </exception>
private String ToSingleLineArray(
    IActivationFunction[] activationFunctions)
{
    var result = new StringBuilder();
    result.Append('[');
    for (int i = 0; i < activationFunctions.Length; i++)
    {
        if (i > 0)
        {
            result.Append(',');
        }

        IActivationFunction af = activationFunctions[i];
        if (af is ActivationSigmoid)
        {
            result.Append("ENCOG.ActivationSigmoid.create()");
        }
        else if (af is ActivationTANH)
        {
            result.Append("ENCOG.ActivationTANH.create()");
        }
        else if (af is ActivationLinear)
        {
            result.Append("ENCOG.ActivationLinear.create()");
        }
        else if (af is ActivationElliott)
        {
            result.Append("ENCOG.ActivationElliott.create()");
        }
        // Bug fix: the original repeated the ActivationElliott branch a
        // second time, which was unreachable dead code; it has been removed.
        else
        {
            // Bug fix: corrected "activatoin" typo in the error message.
            throw new AnalystCodeGenerationError(
                "Unsupported activation function for code generation: "
                + af.GetType().Name);
        }
    }
    result.Append(']');
    return result.ToString();
}
//Methods
/// <summary>
/// Drives an activation function through a simple step simulation. Steps
/// in [from, from + count) receive either the constant current or, when
/// that constant is NaN, a fresh random value; every other step receives
/// zero input. Each step prints the computed signal (or, for spiking
/// activations, the internal state) to the console, and the method blocks
/// on Console.ReadLine before returning.
/// </summary>
private void TestActivation(IActivationFunction af, int simLength, double constCurrent, int from, int count)
{
    for (int step = 1; step <= simLength; step++)
    {
        bool stimulated = step >= from && step < from + count;
        double input = 0d;
        if (stimulated)
        {
            input = double.IsNaN(constCurrent) ? _rand.NextDouble() : constCurrent;
        }

        double signal = af.Compute(input);
        Console.WriteLine($"{af.GetType().Name} step {step}, State {(af.TypeOfActivation == ActivationType.Spiking ? af.InternalState : signal)} signal {signal}");
    }
    Console.ReadLine();
}
/// <summary>
/// Produces a human-readable, line-by-line description of the configured
/// neural-network classifier: machine type and layer count, learning
/// algorithm and hyper-parameters, stopping criteria, per-layer topology,
/// output layer size, and the activation function plus training-case count.
/// </summary>
/// <returns>The description, one display line per list entry.</returns>
public override List<string> DescribeSelf()
{
    List<String> output = new List<string>();

    // NOTE(review): if `states` is empty, FirstOrDefault yields a default
    // pair and pair.Value dereferences below will throw — confirm callers
    // only invoke this after training has populated `states`.
    var pair = states.FirstOrDefault();

    // Consistency fix: use Layers.Length throughout (Layers is indexed as
    // an array below) instead of mixing it with the LINQ Count() extension.
    output.Add("Classification using neural network [" + pair.Value.machine.GetType().Name + "] with " + pair.Value.machine.Layers.Length + " layers.");
    output.Add("Supervised learning [" + teacherRef.GetType().Name + "] with LearningRate = " + setup.neuralnetwork.learningRate + " and Momentum = " + setup.neuralnetwork.momentum + ".");
    // Bug fix: corrected "lower then" -> "lower than" in the display text.
    output.Add("Learning in max. iterations [" + setup.neuralnetwork.learningIterationsMax + "], terminated earlier if error rate is lower than [" + setup.neuralnetwork.errorLowerLimit.ToString("F5") + "]");
    // output.Add("Input layer [0] -> [" + pair.Value.data.NumberOfInputs + "]");
    for (int i = 0; i < pair.Value.machine.Layers.Length; i++)
    {
        Layer l = pair.Value.machine.Layers[i];
        output.Add("Layer [" + (i) + "] -> In[" + l.InputsCount + "] -> Neurons[" + l.Neurons.Length + "] -> Out[" + l.Output.Length + "]");
    }
    output.Add("Output layer [" + (pair.Value.machine.Layers.Length) + "] -> [" + pair.Value.data.NumberOfClasses + "]");
    output.Add("Neuron function [" + activationFunction.GetType().Name + "]. Trained with [" + pair.Value.data.NumberOfCases + "] cases.");
    return (output);
}
/// <summary>
/// Encodes an activation function as its type name plus every parameter,
/// '|'-separated, formatted with the Encog EG CSV format at the default
/// framework precision.
/// </summary>
/// <param name="act">The activation function to encode.</param>
/// <returns>The encoded string.</returns>
private String MakeActivationFunctionString(IActivationFunction act)
{
    var parts = new StringBuilder();
    parts.Append(act.GetType().Name);
    int index = 0;
    while (index < act.Params.Length)
    {
        parts.Append('|').Append(
            CSVFormat.EgFormat.Format(act.Params[index], EncogFramework.DefaultPrecision));
        index++;
    }
    return parts.ToString();
}
/// <summary>
/// Trains a backpropagation network on the supplied tic-tac-toe data with
/// the given topology and hyper-parameters, verifies it against
/// <paramref name="verfData"/>, and persists the run's outcome to the
/// database. Skips the run entirely if an identical parameter combination
/// has already been recorded.
/// </summary>
/// <param name="inputData">Training pairs.</param>
/// <param name="verfData">Verification pairs scored after training.</param>
/// <param name="hiddenLayerCount">Number of hidden layers.</param>
/// <param name="neuronCount">Neurons per hidden layer.</param>
/// <param name="actFunc">Activation function; its type name keys the dedup check.</param>
/// <param name="learnRate">Backpropagation learning rate.</param>
/// <param name="momentum">Backpropagation momentum.</param>
/// <param name="batchSize">Training batch size.</param>
/// <param name="maxEpochs">Upper bound on training epochs.</param>
private void CanItLearnRulesWith(IList<IMLDataPair> inputData, IList<IMLDataPair> verfData, int hiddenLayerCount, int neuronCount, IActivationFunction actFunc, double learnRate, double momentum, int batchSize, int maxEpochs)
{
    var model = new DbModel();
    var funcName = actFunc.GetType().Name;
    var tdCount = inputData.Count();
    // Dedup: bail out if this exact parameter combination was already run
    // and stored for this Name.
    if (model.TicTacToeResult.Any(r => r.HiddenLayerCount == hiddenLayerCount && r.NeuronPerLayercount == neuronCount && r.ActivationFunction == funcName && r.LearningRate == learnRate && r.BatchSize == batchSize && r.Momentum == momentum && r.Name == Name && r.Epochs == maxEpochs && r.TrainingDataCount == tdCount))
        return;
    var nn = CreateNetwork(inputData, hiddenLayerCount, neuronCount, actFunc);
    var train = new Backpropagation(nn, new BasicMLDataSet(inputData), learnRate, momentum);
    train.BatchSize = batchSize;
    // NOTE(review): epoch starts at 1 and the loop exits when epoch reaches
    // maxEpochs, so only maxEpochs - 1 iterations run — confirm intended.
    int epoch = 1;
    do
    {
        train.Iteration();
        epoch++;
    } while (epoch < maxEpochs);
    // A verification case counts as "good" only when all 9 rounded outputs
    // match the 9 rounded ideal values.
    int good = verfData.Count(verf =>
    {
        var output = nn.Compute(verf.Input);
        return Enumerable.Range(0, 9).All(i => Math.Round(output[i]) == Math.Round(verf.Ideal[i]));
    });
    // NOTE(review): bad is computed from VerfDataCount (a member), not
    // verfData.Count — presumably these agree; verify against callers.
    int bad = VerfDataCount - good;
    var result = new TicTacToeResult()
    {
        HiddenLayerCount = hiddenLayerCount,
        NeuronPerLayercount = neuronCount,
        ActivationFunction = funcName,
        Bad = bad,
        Good = good,
        TrainingDataCount = tdCount,
        Momentum = momentum,
        LearningRate = learnRate,
        BatchSize = batchSize,
        Epochs = epoch,
        Error = train.Error,
        Name = Name,
    };
    model.TicTacToeResult.Add(result);
    model.SaveChanges();
}
/// <summary>
/// Write a property as an activation function: the function's type name
/// followed by each parameter, '|'-separated and formatted to 10 digits
/// of precision, forwarded to the string overload of WriteProperty.
/// </summary>
/// <param name="name">The name of the property.</param>
/// <param name="act">The activation function to encode.</param>
public void WriteProperty(string name, IActivationFunction act)
{
    // Rewritten from decompiler-style goto spaghetti into the equivalent
    // plain loop. The original's "((uint)index | 3) == 0" branch could
    // never be true and has been removed; the output string is unchanged.
    StringBuilder builder = new StringBuilder();
    builder.Append(act.GetType().Name);
    for (int index = 0; index < act.Params.Length; index++)
    {
        builder.Append('|');
        builder.Append(CSVFormat.EgFormat.Format(act.Params[index], 10));
    }
    this.WriteProperty(name, builder.ToString());
}
/// <summary>
/// Creates a layer of <paramref name="length"/> neurons, each initialized
/// with unit sign, zero activation, a Unity random bias, and its own fresh
/// instance of the supplied activation function's type (null when no
/// function is given, so neurons can clone independent state).
/// </summary>
public LayerUnit(int length, IActivationFunction actfunc)
{
    var built = new NeuronUnit[length];
    for (int n = 0; n < length; n++)
    {
        built[n] = new NeuronUnit
        {
            sign = 1.0f,
            activation = 0.0f,
            bias = UnityEngine.Random.value,
            af = actfunc != null
                ? (IActivationFunction)Activator.CreateInstance(actfunc.GetType())
                : null
        };
    }
    this.neurons = built;
}
//Methods
/// <summary>
/// Builds the display name of the specified activation function: its
/// activation type followed by '-' and its type name with any "Settings"
/// suffix stripped. A fresh instance is created from the configuration to
/// read both values.
/// </summary>
/// <param name="activationCfg">Activation function configuration</param>
private string GetActivationName(RCNetBaseSettings activationCfg)
{
    IActivationFunction instance = ActivationFactory.Create(activationCfg, _rand);
    string trimmedTypeName = instance.GetType().Name.Replace("Settings", string.Empty);
    return instance.TypeOfActivation.ToString() + "-" + trimmedTypeName;
}