/// <summary>
/// Handles selection changes in the activation-function combo box and
/// assigns the matching Encog activation function to <c>ActivationFunction</c>.
/// </summary>
/// <param name="sender">The combo box that raised the event.</param>
/// <param name="e">Selection-changed event data.</param>
private void CBAktywacje_SelectionChanged(object sender, System.Windows.Controls.SelectionChangedEventArgs e)
{
    // Guard: this event can fire while the selection is being cleared
    // (SelectedItem == null), in which case the original hard cast threw.
    var typeItem = CBAktywacje.SelectedItem as ComboBoxItem;
    if (typeItem == null || typeItem.Content == null)
    {
        return;
    }

    string value = typeItem.Content.ToString();
    switch (value)
    {
        case "Linear":
            ActivationFunction = new ActivationLinear();
            break;
        case "LOG":
            ActivationFunction = new ActivationLOG();
            break;
        case "Sigmoid":
            ActivationFunction = new ActivationSigmoid();
            break;
        case "SIN":
            ActivationFunction = new ActivationSIN();
            break;
        case "TANH":
            ActivationFunction = new ActivationTANH();
            break;
        // Unknown entries leave the previously selected activation in place,
        // matching the original behavior (no default case).
    }
}
/// <summary>
/// Maps the configured <c>ActivationFunction</c> enum value to a freshly
/// constructed Encog activation function instance.
/// </summary>
/// <returns>A new activation function matching the configured type.</returns>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when the configured value is not one of the supported types.
/// </exception>
public IActivationFunction GetActivationFunction()
{
    // Each supported enum member returns directly; no shared local needed.
    switch (ActivationFunction)
    {
        case ActivationFunctionType.Linear:
            return new ActivationLinear();
        case ActivationFunctionType.Sigmoid:
            return new ActivationSigmoid();
        case ActivationFunctionType.TanH:
            return new ActivationTANH();
        case ActivationFunctionType.SoftMax:
            return new ActivationSoftMax();
        case ActivationFunctionType.ReLU:
            return new ActivationReLU();
        default:
            throw new ArgumentOutOfRangeException();
    }
}
/// <summary>
/// Create a feed forward network.
/// </summary>
///
/// <param name="architecture">The architecture string to use.</param>
/// <param name="input">The input count.</param>
/// <param name="output">The output count.</param>
/// <returns>The feedforward network.</returns>
/// <exception cref="EncogError">
/// Thrown when more than two '?' placeholders are used, or when a layer
/// element cannot be parsed.
/// </exception>
public IMLMethod Create(String architecture, int input, int output)
{
    var result = new BasicNetwork();
    IList <String> layers = ArchitectureParse.ParseLayers(architecture);
    // Activation function carried forward to subsequent layers until an
    // activation token in the architecture string replaces it.
    IActivationFunction af = new ActivationLinear();

    // questionPhase tracks how many '?' placeholders have been consumed:
    // phase 0 substitutes the input count, later phases the output count.
    int questionPhase = 0;
    foreach (String layerStr in layers)
    {
        // determine default
        int defaultCount = questionPhase == 0 ? input : output;
        ArchitectureLayer layer = ArchitectureParse.ParseLayer(
            layerStr, defaultCount);
        bool bias = layer.Bias;
        String part = layer.Name;
        part = part != null ? part.Trim() : "";

        // If the token names an activation function, it applies to the
        // layers that follow; otherwise the token describes a layer.
        IActivationFunction lookup = _factory.Create(part);
        if (lookup != null)
        {
            af = lookup;
        }
        else
        {
            if (layer.UsedDefault)
            {
                questionPhase++;
                if (questionPhase > 2)
                {
                    throw new EncogError("Only two ?'s may be used.");
                }
            }
            if (layer.Count == 0)
            {
                throw new EncogError("Unknown architecture element: "
                                     + architecture + ", can't parse: " + part);
            }
            result.AddLayer(new BasicLayer(af, bias, layer.Count));
        }
    }
    result.Structure.FinalizeStructure();
    // Randomize weights so the returned network is ready for training.
    result.Reset();
    return(result);
}
/// <summary>
/// Construct a flat neural network with up to two hidden layers.
/// </summary>
///
/// <param name="input">Neurons in the input layer.</param>
/// <param name="hidden1">Neurons in the first hidden layer (0 for none).</param>
/// <param name="hidden2">Neurons in the second hidden layer (0 for none).</param>
/// <param name="output">Neurons in the output layer.</param>
/// <param name="tanh">True if this is a tanh activation, false for sigmoid.</param>
public FlatNetwork(int input, int hidden1, int hidden2, int output, bool tanh)
{
    // Input layer is always linear; hidden/output layers share one activation.
    IActivationFunction linear = new ActivationLinear();
    IActivationFunction act = tanh
        ? (IActivationFunction)new ActivationTANH()
        : new ActivationSigmoid();

    bool noHidden = (hidden1 == 0) && (hidden2 == 0);
    bool singleHidden = !noHidden && ((hidden1 == 0) || (hidden2 == 0));

    FlatLayer[] layers;
    if (noHidden)
    {
        layers = new[]
        {
            new FlatLayer(linear, input, DefaultBiasActivation),
            new FlatLayer(act, output, NoBiasActivation)
        };
    }
    else if (singleHidden)
    {
        // Exactly one hidden count is non-zero; use whichever was given.
        int hiddenCount = Math.Max(hidden1, hidden2);
        layers = new[]
        {
            new FlatLayer(linear, input, DefaultBiasActivation),
            new FlatLayer(act, hiddenCount, DefaultBiasActivation),
            new FlatLayer(act, output, NoBiasActivation)
        };
    }
    else
    {
        layers = new[]
        {
            new FlatLayer(linear, input, DefaultBiasActivation),
            new FlatLayer(act, hidden1, DefaultBiasActivation),
            new FlatLayer(act, hidden2, DefaultBiasActivation),
            new FlatLayer(act, output, NoBiasActivation)
        };
    }

    _isLimited = false;
    _connectionLimit = 0.0d;
    Init(layers);
}
/// <summary>
/// Setup for training: seed the default NEAT parameters, build the
/// innovation list from the first genome, and perform the initial
/// speciation pass.
/// </summary>
private void Init()
{
    // Default evolution parameters: mutation-related rates.
    ParamActivationMutationRate = 0.1;
    ParamMutationRate = 0.2;
    ParamMaxActivationPerturbation = 0.1;
    ParamMaxWeightPerturbation = 0.5;
    ParamProbabilityWeightReplaced = 0.1;

    // Default structural-change probabilities and limits.
    ParamChanceAddLink = 0.07;
    ParamChanceAddNode = 0.04;
    ParamChanceAddRecurrentLink = 0.05;
    ParamNumAddLinkAttempts = 5;
    ParamNumTrysToFindLoopedLink = 5;
    ParamNumTrysToFindOldLink = 5;
    ParamMaxPermittedNeurons = 100;

    // Default speciation / reproduction parameters.
    ParamCompatibilityThreshold = 0.26;
    ParamCrossoverRate = 0.7;
    ParamMaxNumberOfSpecies = 0;
    ParamNumGensAllowedNoImprovement = 15;

    NeatActivationFunction = new ActivationSigmoid();
    OutputActivationFunction = new ActivationLinear();

    // Seed the innovation list from the first genome's topology.
    NEATGenome genome = (NEATGenome)Population.Genomes[0];
    Population.Innovations = new NEATInnovationList(Population,
                                                   genome.Links,
                                                   genome.Neurons);

    splits = Split(null, 0, 1, 0);

    // Start the best-ever score at the worst possible value for the
    // chosen optimization direction.
    bestEverScore = CalculateScore.ShouldMinimize
        ? double.MaxValue
        : double.MinValue;

    ResetAndKill();
    SortAndRecord();
    SpeciateAndCalculateSpawnLevels();
}
/// <summary>
/// Append one <c>BasicLayer</c> to the network for each layer
/// configuration, selecting the activation function by its numeric id.
/// </summary>
/// <param name="gen">Layer configurations (activation id, bias flag, neuron count).</param>
void AddLayers(List <LayerConfig> gen)
{
    foreach (var g in gen)
    {
        // Fix: the original pre-assigned ActivationBiPolar for id 0 before
        // the switch; that assignment was dead code, immediately overwritten
        // by "case 0" below, so it has been removed.
        IActivationFunction act;
        switch (g.ActivationType)
        {
            case 0: act = new ActivationBiPolar(); break;
            case 1: act = new ActivationBipolarSteepenedSigmoid(); break;
            case 2: act = new ActivationClippedLinear(); break;
            case 3: act = new ActivationCompetitive(); break;
            case 4: act = new ActivationElliott(); break;
            case 5: act = new ActivationElliottSymmetric(); break;
            case 6: act = new ActivationGaussian(); break;
            case 7: act = new ActivationLinear(); break;
            case 8: act = new ActivationLOG(); break;
            case 9: act = new ActivationRamp(); break;
            // NOTE(review): id 10 maps to the same ActivationRamp as id 9 —
            // looks like a copy/paste slip; confirm which activation id 10
            // should produce before changing it.
            case 10: act = new ActivationRamp(); break;
            case 11: act = new ActivationSigmoid(); break;
            case 12: act = new ActivationSIN(); break;
            case 13: act = new ActivationSoftMax(); break;
            case 14: act = new ActivationSteepenedSigmoid(); break;
            case 15: act = new ActivationStep(); break;
            case 16: act = new ActivationTANH(); break;
            // Unknown ids fall back to SoftMax, as in the original.
            default: act = new ActivationSoftMax(); break;
        }
        network.AddLayer(new BasicLayer(act, g.hasBias, g.neurons));
    }
}
/// <summary>
/// Create the flat neural network.
/// </summary>
/// <remarks>
/// Builds a <c>FlatNetwork</c> mirror of the layer-based network when
/// validation passes, wiring context layers via <c>ContextFedBy</c> and
/// recording each context layer's offset into the flat context array.
/// When validation fails, marks the flat representation as never needed.
/// </remarks>
public void Flatten()
{
    // NOTE(review): isRBF is never set to true in this method, so the
    // EndTraining branch near the end is currently dead — confirm whether
    // the RBF early-return path below was meant to set it instead.
    bool isRBF = false;
    // Bidirectional maps between layer-based and flat layers, used later
    // to resolve context wiring in both directions.
    IDictionary <ILayer, FlatLayer> regular2flat = new Dictionary <ILayer, FlatLayer>();
    IDictionary <FlatLayer, ILayer> flat2regular = new Dictionary <FlatLayer, ILayer>();
    IList <ObjectPair <ILayer, ILayer> > contexts = new List <ObjectPair <ILayer, ILayer> >();
    this.flat = null;

    ValidateForFlat val = new ValidateForFlat();
    // IsValid returning null appears to mean "no validation error" —
    // TODO confirm against ValidateForFlat's contract.
    if (val.IsValid(this.network) == null)
    {
        // Special case: a 3-layer network with an RBF middle layer becomes
        // a FlatNetworkRBF directly; no context wiring applies.
        if (this.layers.Count == 3 &&
            this.layers[1] is RadialBasisFunctionLayer)
        {
            RadialBasisFunctionLayer rbf =
                (RadialBasisFunctionLayer)this.layers[1];
            this.flat = new FlatNetworkRBF(this.network.InputCount,
                                           rbf.NeuronCount,
                                           this.network.OutputCount,
                                           rbf.RadialBasisFunction);
            FlattenWeights();
            this.flatUpdate = FlatUpdateNeeded.None;
            return;
        }

        // Flat layers are filled back-to-front (index counts down), so the
        // flat array ends up ordered opposite to this.layers iteration.
        int flatLayerCount = CountNonContext();
        FlatLayer[] flatLayers = new FlatLayer[flatLayerCount];

        int index = flatLayers.Length - 1;
        foreach (ILayer layer in this.layers)
        {
            if (layer is ContextLayer)
            {
                // Context layers are not materialized as flat layers;
                // record their (source, target) BasicLayer pair instead.
                ISynapse inboundSynapse = network.Structure
                                          .FindPreviousSynapseByLayerType(layer, typeof(BasicLayer));

                ISynapse outboundSynapse = network
                                           .Structure
                                           .FindNextSynapseByLayerType(layer, typeof(BasicLayer));

                if (inboundSynapse == null)
                {
                    throw new NeuralNetworkError(
                        "Context layer must be connected to by one BasicLayer.");
                }

                if (outboundSynapse == null)
                {
                    throw new NeuralNetworkError(
                        "Context layer must connect to by one BasicLayer.");
                }

                ILayer inbound  = inboundSynapse.FromLayer;
                ILayer outbound = outboundSynapse.ToLayer;

                contexts
                .Add(new ObjectPair <ILayer, ILayer>(inbound, outbound));
            }
            else
            {
                // Regular layer: translate to a FlatLayer, defaulting to a
                // linear activation with param[0] = 1 when none is set.
                double bias = this.FindNextBias(layer);

                IActivationFunction activationType;
                double[] param = new double[1];

                if (layer.ActivationFunction == null)
                {
                    activationType = new ActivationLinear();
                    param = new double[1];
                    param[0] = 1;
                }
                else
                {
                    activationType = layer.ActivationFunction;
                    param = layer.ActivationFunction.Params;
                }

                FlatLayer flatLayer = new FlatLayer(activationType, layer
                                                    .NeuronCount, bias, param);

                regular2flat[layer] = flatLayer;
                flat2regular[flatLayer] = layer;
                flatLayers[index--] = flatLayer;
            }
        }

        // now link up the context layers
        foreach (ObjectPair <ILayer, ILayer> context in contexts)
        {
            // link the context layer on the FlatLayer
            ILayer layer = context.B;
            ISynapse synapse = this.network
                               .Structure
                               .FindPreviousSynapseByLayerType(layer, typeof(BasicLayer));
            FlatLayer from = regular2flat[context.A];
            FlatLayer to = regular2flat[synapse.FromLayer];
            to.ContextFedBy = from;
        }

        this.flat = new FlatNetwork(flatLayers);

        // update the context indexes on the non-flat network
        for (int i = 0; i < flatLayerCount; i++)
        {
            FlatLayer fedBy = flatLayers[i].ContextFedBy;

            if (fedBy != null)
            {
                // NOTE(review): flatLayers[i + 1] is read without an explicit
                // bounds check; this relies on a context-fed layer never being
                // the last flat layer — verify that invariant holds.
                ILayer fedBy2 = flat2regular[flatLayers[i + 1]];
                ISynapse synapse = FindPreviousSynapseByLayerType(fedBy2, typeof(ContextLayer));
                if (synapse == null)
                {
                    throw new NeuralNetworkError("Can't find parent synapse to context layer.");
                }
                ContextLayer context = (ContextLayer)synapse.FromLayer;

                // find fedby index: linear scan for the flat layer that
                // feeds this context.
                int fedByIndex = -1;
                for (int j = 0; j < flatLayerCount; j++)
                {
                    if (flatLayers[j] == fedBy)
                    {
                        fedByIndex = j;
                        break;
                    }
                }

                if (fedByIndex == -1)
                {
                    throw new NeuralNetworkError("Can't find layer feeding context.");
                }

                context.FlatContextIndex = this.flat.ContextTargetOffset[fedByIndex];
            }
        }

        // RBF networks will not train every layer
        // (dead with isRBF always false — see note at top).
        if (isRBF)
        {
            this.flat.EndTraining = flatLayers.Length - 1;
        }

        FlattenWeights();

        // NOTE(review): empty branch — connection-limited handling looks
        // unfinished or intentionally deferred; confirm intent.
        if (this.IsConnectionLimited)
        {
        }

        this.flatUpdate = FlatUpdateNeeded.None;
    }
    else
    {
        // Network shape cannot be flattened; never attempt again.
        this.flatUpdate = FlatUpdateNeeded.Never;
    }
}