//Constructors
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="numOfNeurons">The number of neurons.</param>
/// <param name="activationCfg">The configuration of the activation function.</param>
public HiddenLayerSettings(int numOfNeurons, IActivationSettings activationCfg)
{
    NumOfNeurons = numOfNeurons;
    //Keep an own deep copy so this configuration is independent of the caller's instance
    ActivationCfg = (IActivationSettings)activationCfg.DeepClone();
    Check();
}
/// <summary>
/// Creates an initialized instance and gathers the activation codes from the available sources.
/// </summary>
/// <param name="settings">The settings possibly carrying an explicitly configured activation code.</param>
public ActivationCode(IActivationSettings settings)
{
    //An explicitly configured code has the highest priority
    if (settings.ActivationCode.HasValue)
    {
        _activationCodes.Add(settings.ActivationCode.Value.ToHexString());
    }
    //Fallback 1: codes from the registry (only when nothing was found so far)
    if (!HasActivationCode)
    {
        var fromRegistry = new ActivationCodeRegistry();
        if (fromRegistry.HasActivationCode)
        {
            _activationCodes = _activationCodes.Union(fromRegistry.ActivationCodes).ToList();
        }
    }
    //Fallback 2: codes stored by the application (only when still nothing was found)
    if (!HasActivationCode)
    {
        var fromApp = new ActivationCodeApp();
        if (fromApp.HasActivationCode)
        {
            _activationCodes = _activationCodes.Union(fromApp.ActivationCodes).ToList();
        }
    }
}
//Constructors
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="outputActivationCfg">Configuration of the output layer activation function.</param>
/// <param name="hiddenLayersCfg">The configuration of the hidden layers. Hidden layers are optional.</param>
/// <param name="trainerCfg">The configuration of the associated trainer.</param>
public FeedForwardNetworkSettings(IActivationSettings outputActivationCfg,
                                  HiddenLayersSettings hiddenLayersCfg,
                                  RCNetBaseSettings trainerCfg
                                  )
{
    //Deep copies keep this configuration independent of the caller's instances
    OutputActivationCfg = (IActivationSettings)outputActivationCfg.DeepClone();
    if (hiddenLayersCfg == null)
    {
        //Missing hidden layers configuration means the default (no hidden layers)
        HiddenLayersCfg = new HiddenLayersSettings();
    }
    else
    {
        HiddenLayersCfg = (HiddenLayersSettings)hiddenLayersCfg.DeepClone();
    }
    TrainerCfg = trainerCfg.DeepClone();
    Check();
}
/// <summary>
/// Creates an initialized instance from the xml configuration.
/// </summary>
/// <param name="elem">A xml element containing the configuration data.</param>
public HiddenLayerSettings(XElement elem)
{
    //Validation
    XElement settingsElem = Validate(elem, XsdTypeName);
    //Parsing
    //Culture-invariant parse: a numeric xml attribute must not depend on the host machine's locale (CA1305)
    NumOfNeurons = int.Parse(settingsElem.Attribute("neurons").Value, System.Globalization.CultureInfo.InvariantCulture);
    ActivationCfg = ActivationFactory.LoadSettings(settingsElem.Elements().First());
    Check();
    return;
}
/// <summary>
/// Tests whether the activation function can be used as the FF network's hidden layer activation.
/// </summary>
/// <param name="activationCfg">The configuration of the activation function.</param>
public static bool IsAllowedHiddenAF(IActivationSettings activationCfg)
{
    //Only analog activations are eligible at all
    if (activationCfg.TypeOfActivation != ActivationType.Analog)
    {
        return false;
    }
    //Fixed seed: the probe instance is inspected only for its capabilities
    AFAnalogBase analogAF = (AFAnalogBase)ActivationFactory.CreateAF(activationCfg, new Random(0));
    //The hidden layer activation must support the derivative and must not depend on surrounding activations
    return analogAF.SupportsDerivative && !analogAF.DependsOnSorround;
}
/// <summary>
/// Creates the configuration of the feed forward network having the hidden layers, the Identity output layer and associated the resilient backpropagation trainer.
/// </summary>
/// <param name="hiddenLayerSize">The number of hidden layer neurons.</param>
/// <param name="hiddenLayerAFnCfg">The configuration of the hidden layer activation.</param>
/// <param name="numOfHiddenLayers">The number of hidden layers.</param>
/// <param name="numOfAttempts">The number of regression attempts.</param>
/// <param name="numOfEpochs">The number of training epochs within an attempt.</param>
public static FeedForwardNetworkSettings CreateMultiLayerFFNetCfg(int hiddenLayerSize,
                                                                  IActivationSettings hiddenLayerAFnCfg,
                                                                  int numOfHiddenLayers,
                                                                  int numOfAttempts,
                                                                  int numOfEpochs
                                                                  )
{
    //All hidden layers share the same size and activation
    var layerCfgs = new List<HiddenLayerSettings>(numOfHiddenLayers);
    for (int layerIdx = 0; layerIdx < numOfHiddenLayers; layerIdx++)
    {
        layerCfgs.Add(new HiddenLayerSettings(hiddenLayerSize, hiddenLayerAFnCfg));
    }
    //Identity output layer and RProp trainer
    return new FeedForwardNetworkSettings(new AFAnalogIdentitySettings(),
                                          new HiddenLayersSettings(layerCfgs),
                                          new RPropTrainerSettings(numOfAttempts, numOfEpochs)
                                          );
}
//Constructors
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="name">The name of the neuron group.</param>
/// <param name="relShare">Specifies how big relative portion of pool's neurons is formed by this group of the neurons.</param>
/// <param name="activationCfg">The common configuration of the neurons' activation function.</param>
/// <param name="predictorsCfg">The common configuration of the predictors provider.</param>
/// <param name="homogenousExcitabilityCfg">The configuration of the neurons homogenous excitability.</param>
/// <param name="biasCfg">The configuration of the constant input bias.</param>
public SpikingNeuronGroupSettings(string name,
                                  double relShare,
                                  IActivationSettings activationCfg,
                                  PredictorsProviderSettings predictorsCfg,
                                  HomogenousExcitabilitySettings homogenousExcitabilityCfg = null,
                                  RandomValueSettings biasCfg = null
                                  )
{
    Name = name;
    RelShare = relShare;
    //Deep copies keep this configuration independent of the caller's instances
    ActivationCfg = (IActivationSettings)activationCfg.DeepClone();
    PredictorsCfg = (PredictorsProviderSettings)predictorsCfg.DeepClone();
    if (homogenousExcitabilityCfg == null)
    {
        //Missing excitability configuration means the defaults
        HomogenousExcitabilityCfg = new HomogenousExcitabilitySettings();
    }
    else
    {
        HomogenousExcitabilityCfg = (HomogenousExcitabilitySettings)homogenousExcitabilityCfg.DeepClone();
    }
    if (biasCfg == null)
    {
        //Missing bias configuration means no bias
        BiasCfg = null;
    }
    else
    {
        BiasCfg = (RandomValueSettings)biasCfg.DeepClone();
    }
    Check();
}
/// <summary>
/// Creates the configuration of neuron group having the specified spiking activation function.
/// </summary>
/// <param name="activationCfg">The activation function configuration.</param>
/// <param name="predictorsCfg">The predictors provider configuration.</param>
/// <param name="excitabilityCfg">The configuration of the homogenous excitability.</param>
/// <param name="steadyBias">The constant bias (0 means no bias).</param>
private SpikingNeuronGroupSettings CreateSpikingGroup(IActivationSettings activationCfg,
                                                      PredictorsProviderSettings predictorsCfg,
                                                      HomogenousExcitabilitySettings excitabilityCfg,
                                                      double steadyBias = 0d
                                                      )
{
    //A zero steady bias means no bias configuration at all; otherwise a degenerated random range [steadyBias, steadyBias]
    RandomValueSettings biasCfg = null;
    if (steadyBias != 0)
    {
        biasCfg = new RandomValueSettings(steadyBias, steadyBias);
    }
    //The single group covers the whole pool (relative share = 1)
    return new SpikingNeuronGroupSettings(BuildNeuronGroupName(activationCfg),
                                          1d,
                                          activationCfg,
                                          predictorsCfg,
                                          excitabilityCfg,
                                          biasCfg
                                          );
}
//Constructors
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="name">The name of the neuron group.</param>
/// <param name="relShare">Specifies how big relative portion of pool's neurons is formed by this group of the neurons.</param>
/// <param name="activationCfg">The common configuration of the neurons' activation function.</param>
/// <param name="predictorsCfg">The common configuration of the predictors provider.</param>
/// <param name="firingThreshold">The firing threshold value. Every time the current normalized activation is higher than the normalized past reference activation by at least this threshold, it is evaluated as a firing event.</param>
/// <param name="thresholdMaxRefDeepness">Maximum age of the past activation for the evaluation of the firing event.</param>
/// <param name="biasCfg">The configuration of the constant input bias.</param>
/// <param name="retainmentCfg">The configuration of the neurons' retainment property.</param>
public AnalogNeuronGroupSettings(string name,
                                 double relShare,
                                 IActivationSettings activationCfg,
                                 PredictorsProviderSettings predictorsCfg,
                                 double firingThreshold = DefaultFiringThreshold,
                                 int thresholdMaxRefDeepness = DefaultThresholdMaxRefDeepness,
                                 RandomValueSettings biasCfg = null,
                                 RetainmentSettings retainmentCfg = null
                                 )
{
    Name = name;
    RelShare = relShare;
    //Deep copies keep this configuration independent of the caller's instances
    ActivationCfg = (IActivationSettings)activationCfg.DeepClone();
    PredictorsCfg = (PredictorsProviderSettings)predictorsCfg.DeepClone();
    FiringThreshold = firingThreshold;
    ThresholdMaxRefDeepness = thresholdMaxRefDeepness;
    //Optional configurations stay null when not specified
    if (biasCfg != null)
    {
        BiasCfg = (RandomValueSettings)biasCfg.DeepClone();
    }
    else
    {
        BiasCfg = null;
    }
    if (retainmentCfg != null)
    {
        RetainmentCfg = (RetainmentSettings)retainmentCfg.DeepClone();
    }
    else
    {
        RetainmentCfg = null;
    }
    Check();
}
/// <summary>
/// Creates the configuration of neuron group having the specified analog activation function.
/// </summary>
/// <param name="activationCfg">The activation function configuration.</param>
/// <param name="predictorsCfg">The predictors provider configuration.</param>
/// <param name="maxAbsBias">The maximum absolute value of the bias (0 means no bias).</param>
/// <param name="maxRetainmentStrength">The maximum retainment strength (0 means no retainment).</param>
private AnalogNeuronGroupSettings CreateAnalogGroup(IActivationSettings activationCfg,
                                                    PredictorsProviderSettings predictorsCfg,
                                                    double maxAbsBias = 0d,
                                                    double maxRetainmentStrength = 0d
                                                    )
{
    //A zero maxAbsBias means no bias configuration; otherwise a symmetric random range
    RandomValueSettings biasCfg = null;
    if (maxAbsBias != 0)
    {
        biasCfg = new RandomValueSettings(-maxAbsBias, maxAbsBias);
    }
    //A zero maxRetainmentStrength means no retainment; otherwise full density with random strength [0, max]
    const double RetainmentDensity = 1d;
    RetainmentSettings retainmentCfg = null;
    if (maxRetainmentStrength != 0)
    {
        retainmentCfg = new RetainmentSettings(RetainmentDensity, new URandomValueSettings(0, maxRetainmentStrength));
    }
    //The single group covers the whole pool (relative share = 1), default firing evaluation
    return new AnalogNeuronGroupSettings(BuildNeuronGroupName(activationCfg),
                                         1d,
                                         activationCfg,
                                         predictorsCfg,
                                         AnalogNeuronGroupSettings.DefaultFiringThreshold,
                                         AnalogNeuronGroupSettings.DefaultThresholdMaxRefDeepness,
                                         biasCfg,
                                         retainmentCfg
                                         );
}
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="settings">The activation settings retained by this instance.</param>
//NOTE(review): init() presumably performs the remaining initialization from _settings — confirm against its definition
public ActivationCode(IActivationSettings settings) { _settings = settings; init(); }
/// <summary>
/// Creates the simplified configuration of the state machine following the pure LSM design.
/// </summary>
/// <param name="totalSize">The total number of hidden neurons.</param>
/// <param name="spikingActivationCfg">The configuration of the spiking activation function.</param>
/// <param name="excitabilityCfg">The homogenous excitability configuration.</param>
/// <param name="inputConnectionDensity">The density of the input field connections to hidden neurons.</param>
/// <param name="maxInputDelay">The maximum delay of an input synapse.</param>
/// <param name="interconnectionDensity">The density of the hidden neurons recurrent interconnection.</param>
/// <param name="maxInternalDelay">The maximum delay of an internal synapse.</param>
/// <param name="steadyBias">The constant bias (0 means no bias).</param>
/// <param name="predictorsProviderCfg">The configuration of the predictors provider.</param>
/// <exception cref="InvalidOperationException">When the neural preprocessor is bypassed.</exception>
/// <exception cref="ArgumentException">When the specified activation is not spiking.</exception>
public StateMachineSettings CreatePureLSMCfg(int totalSize,
                                             IActivationSettings spikingActivationCfg,
                                             HomogenousExcitabilitySettings excitabilityCfg,
                                             double inputConnectionDensity,
                                             int maxInputDelay,
                                             double interconnectionDensity,
                                             int maxInternalDelay,
                                             double steadyBias,
                                             PredictorsProviderSettings predictorsProviderCfg
                                             )
{
    //Check NP is not bypassed
    if (BypassedNP)
    {
        throw new InvalidOperationException("Neural preprocessor is bypassed thus LSM design can't be created.");
    }
    //Activation check. Fixed seed makes the probe deterministic (consistent with IsAllowedHiddenAF);
    //the probe instance is used only to read TypeOfActivation and then discarded.
    if (ActivationFactory.CreateAF(spikingActivationCfg, new Random(0)).TypeOfActivation != ActivationType.Spiking)
    {
        //nameof keeps the reported parameter name refactor-safe
        throw new ArgumentException("Specified activation must be spiking.", nameof(spikingActivationCfg));
    }
    //One neuron group
    SpikingNeuronGroupSettings grp = CreateSpikingGroup(spikingActivationCfg, predictorsProviderCfg, excitabilityCfg, steadyBias);
    //Simple spiking pool
    PoolSettings poolCfg = new PoolSettings(BuildPoolName(ActivationContent.Spiking, 0),
                                            new ProportionsSettings(totalSize, 1, 1),
                                            new NeuronGroupsSettings(grp),
                                            new InterconnSettings(new RandomSchemaSettings(interconnectionDensity, 0d, false, false))
                                            );
    //Simple reservoir structure
    ReservoirStructureSettings resStructCfg = new ReservoirStructureSettings(BuildResStructName(ActivationContent.Spiking, 0),
                                                                            new PoolsSettings(poolCfg)
                                                                            );
    //Input connections configuration - every external input field connects to the pool
    List<InputConnSettings> inputConns = new List<InputConnSettings>(InputEncoderCfg.VaryingFieldsCfg.ExternalFieldsCfg.FieldCfgCollection.Count);
    foreach (ExternalFieldSettings fieldCfg in InputEncoderCfg.VaryingFieldsCfg.ExternalFieldsCfg.FieldCfgCollection)
    {
        InputConnSettings inputConnCfg = new InputConnSettings(fieldCfg.Name,
                                                               poolCfg.Name,
                                                               inputConnectionDensity,
                                                               0
                                                               );
        inputConns.Add(inputConnCfg);
    }
    //Synapse general configuration
    SpikingSourceSTInputSettings spikingSourceSTInputSettings = new SpikingSourceSTInputSettings(new URandomValueSettings(0, 1), new PlasticitySTInputSettings(new NonlinearDynamicsSTInputSettings()));
    SpikingSourceSTExcitatorySettings spikingSourceSTExcitatorySettings = new SpikingSourceSTExcitatorySettings(new URandomValueSettings(0, 1), new PlasticitySTExcitatorySettings(new NonlinearDynamicsSTExcitatorySettings()));
    SpikingSourceSTInhibitorySettings spikingSourceSTInhibitorySettings = new SpikingSourceSTInhibitorySettings(new URandomValueSettings(0, 1), new PlasticitySTInhibitorySettings(new NonlinearDynamicsSTInhibitorySettings()));
    SynapseSTInputSettings synapseSTInputSettings = new SynapseSTInputSettings(Synapse.SynapticDelayMethod.Random, maxInputDelay, null, spikingSourceSTInputSettings);
    SynapseSTExcitatorySettings synapseSTExcitatorySettings = new SynapseSTExcitatorySettings(Synapse.SynapticDelayMethod.Random, maxInternalDelay, 4, null, spikingSourceSTExcitatorySettings);
    SynapseSTInhibitorySettings synapseSTInhibitorySettings = new SynapseSTInhibitorySettings(Synapse.SynapticDelayMethod.Random, maxInternalDelay, 1, null, spikingSourceSTInhibitorySettings);
    SynapseSTSettings synapseSTCfg = new SynapseSTSettings(synapseSTInputSettings,
                                                           synapseSTExcitatorySettings,
                                                           synapseSTInhibitorySettings
                                                           );
    SynapseSettings synapseCfg = new SynapseSettings(synapseSTCfg, null);
    //Create reservoir instance
    ReservoirInstanceSettings resInstCfg = new ReservoirInstanceSettings(GetResInstName(ResDesign.PureLSM, 0),
                                                                         resStructCfg.Name,
                                                                         new InputConnsSettings(inputConns),
                                                                         synapseCfg
                                                                         );
    //Build and return SM configuration
    return new StateMachineSettings(new NeuralPreprocessorSettings(InputEncoderCfg,
                                                                   new ReservoirStructuresSettings(resStructCfg),
                                                                   new ReservoirInstancesSettings(resInstCfg)
                                                                   ),
                                    ReadoutLayerCfg
                                    );
}
/// <summary>
/// Builds the name of the neuron group.
/// </summary>
/// <param name="activationCfg">The activation function configuration.</param>
private string BuildNeuronGroupName(IActivationSettings activationCfg)
{
    //Group name is the activation name with a fixed prefix
    return $"Grp-{BuildActivationName(activationCfg)}";
}
//Methods
/// <summary>
/// Builds the name of the specified activation function.
/// </summary>
/// <param name="activationCfg">The activation function configuration.</param>
private string BuildActivationName(IActivationSettings activationCfg)
{
    //Instantiate the activation function to obtain its concrete type and activation kind
    IActivation aFn = ActivationFactory.CreateAF(activationCfg, _rand);
    string fnTypeName = aFn.GetType().Name.Replace("Settings", string.Empty);
    return $"{aFn.TypeOfActivation}-{fnTypeName}";
}
/// <summary>
/// Creates the configuration of the feed forward network having an output layer and associated the resilient backpropagation trainer.
/// </summary>
/// <param name="aFnCfg">The configuration of the output layer activation.</param>
/// <param name="numOfAttempts">The number of regression attempts.</param>
/// <param name="numOfEpochs">The number of training epochs within an attempt.</param>
public static FeedForwardNetworkSettings CreateSingleLayerFFNetCfg(IActivationSettings aFnCfg, int numOfAttempts, int numOfEpochs) =>
    //Null hidden layers configuration means no hidden layers
    new FeedForwardNetworkSettings(aFnCfg, null, new RPropTrainerSettings(numOfAttempts, numOfEpochs));
/// <summary>
/// Creates the instance of the activation function.
/// </summary>
/// <param name="cfg">The configuration.</param>
/// <param name="rand">A random object to be used for randomly generated parameters.</param>
public static IActivation CreateAF(IActivationSettings cfg, Random rand)
{
    //Each branch instantiates the activation function matching the concrete settings type,
    //drawing any randomized parameters from the supplied random generator.
    Type settingsType = cfg.GetType();
    if (settingsType == typeof(AFSpikingAdExpIFSettings))
    {
        AFSpikingAdExpIFSettings afs = (AFSpikingAdExpIFSettings)cfg;
        return new AFSpikingAdExpIF(rand.NextDouble(afs.TimeScale),
                                    rand.NextDouble(afs.Resistance),
                                    rand.NextDouble(afs.RestV),
                                    rand.NextDouble(afs.ResetV),
                                    rand.NextDouble(afs.RheobaseV),
                                    rand.NextDouble(afs.FiringThresholdV),
                                    rand.NextDouble(afs.SharpnessDeltaT),
                                    rand.NextDouble(afs.AdaptationVoltageCoupling),
                                    rand.NextDouble(afs.AdaptationTimeConstant),
                                    rand.NextDouble(afs.AdaptationSpikeTriggeredIncrement),
                                    afs.SolverMethod,
                                    afs.SolverCompSteps,
                                    afs.StimuliDuration,
                                    rand.NextRangedUniformDouble(MinInitialVRatio, MaxInitialVRatio)
                                    );
    }
    if (settingsType == typeof(AFAnalogBentIdentitySettings))
    {
        return new AFAnalogBentIdentity();
    }
    if (settingsType == typeof(AFAnalogElliotSettings))
    {
        AFAnalogElliotSettings afs = (AFAnalogElliotSettings)cfg;
        return new AFAnalogElliot(rand.NextDouble(afs.Slope));
    }
    if (settingsType == typeof(AFSpikingExpIFSettings))
    {
        AFSpikingExpIFSettings afs = (AFSpikingExpIFSettings)cfg;
        return new AFSpikingExpIF(rand.NextDouble(afs.TimeScale),
                                  rand.NextDouble(afs.Resistance),
                                  rand.NextDouble(afs.RestV),
                                  rand.NextDouble(afs.ResetV),
                                  rand.NextDouble(afs.RheobaseV),
                                  rand.NextDouble(afs.FiringThresholdV),
                                  rand.NextDouble(afs.SharpnessDeltaT),
                                  afs.RefractoryPeriods,
                                  afs.SolverMethod,
                                  afs.SolverCompSteps,
                                  afs.StimuliDuration,
                                  rand.NextRangedUniformDouble(MinInitialVRatio, MaxInitialVRatio)
                                  );
    }
    if (settingsType == typeof(AFAnalogGaussianSettings))
    {
        return new AFAnalogGaussian();
    }
    if (settingsType == typeof(AFAnalogIdentitySettings))
    {
        return new AFAnalogIdentity();
    }
    if (settingsType == typeof(AFAnalogISRUSettings))
    {
        AFAnalogISRUSettings afs = (AFAnalogISRUSettings)cfg;
        return new AFAnalogISRU(rand.NextDouble(afs.Alpha));
    }
    if (settingsType == typeof(AFSpikingIzhikevichIFSettings))
    {
        AFSpikingIzhikevichIFSettings afs = (AFSpikingIzhikevichIFSettings)cfg;
        return new AFSpikingIzhikevichIF(rand.NextDouble(afs.RecoveryTimeScale),
                                         rand.NextDouble(afs.RecoverySensitivity),
                                         rand.NextDouble(afs.RecoveryReset),
                                         rand.NextDouble(afs.RestV),
                                         rand.NextDouble(afs.ResetV),
                                         rand.NextDouble(afs.FiringThresholdV),
                                         afs.RefractoryPeriods,
                                         afs.SolverMethod,
                                         afs.SolverCompSteps,
                                         afs.StimuliDuration,
                                         rand.NextRangedUniformDouble(MinInitialVRatio, MaxInitialVRatio)
                                         );
    }
    if (settingsType == typeof(AFSpikingAutoIzhikevichIFSettings))
    {
        //A single squared random value drives the correlated "auto" parameter ranges
        double randomValue = rand.NextDouble().Power(2);
        AFSpikingAutoIzhikevichIFSettings afs = (AFSpikingAutoIzhikevichIFSettings)cfg;
        //Ranges
        return new AFSpikingIzhikevichIF(0.02,
                                         0.2,
                                         8 + (-6 * randomValue),
                                         -70,
                                         -65 + (15 * randomValue),
                                         30,
                                         afs.RefractoryPeriods,
                                         afs.SolverMethod,
                                         afs.SolverCompSteps,
                                         afs.StimuliDuration,
                                         rand.NextRangedUniformDouble(MinInitialVRatio, MaxInitialVRatio)
                                         );
    }
    if (settingsType == typeof(AFSpikingLeakyIFSettings))
    {
        AFSpikingLeakyIFSettings afs = (AFSpikingLeakyIFSettings)cfg;
        return new AFSpikingLeakyIF(rand.NextDouble(afs.TimeScale),
                                    rand.NextDouble(afs.Resistance),
                                    rand.NextDouble(afs.RestV),
                                    rand.NextDouble(afs.ResetV),
                                    rand.NextDouble(afs.FiringThresholdV),
                                    afs.RefractoryPeriods,
                                    afs.SolverMethod,
                                    afs.SolverCompSteps,
                                    afs.StimuliDuration,
                                    rand.NextRangedUniformDouble(MinInitialVRatio, MaxInitialVRatio)
                                    );
    }
    if (settingsType == typeof(AFAnalogLeakyReLUSettings))
    {
        AFAnalogLeakyReLUSettings afs = (AFAnalogLeakyReLUSettings)cfg;
        return new AFAnalogLeakyReLU(rand.NextDouble(afs.NegSlope));
    }
    if (settingsType == typeof(AFAnalogSigmoidSettings))
    {
        return new AFAnalogSigmoid();
    }
    if (settingsType == typeof(AFSpikingSimpleIFSettings))
    {
        AFSpikingSimpleIFSettings afs = (AFSpikingSimpleIFSettings)cfg;
        return new AFSpikingSimpleIF(rand.NextDouble(afs.Resistance),
                                     rand.NextDouble(afs.DecayRate),
                                     rand.NextDouble(afs.ResetV),
                                     rand.NextDouble(afs.FiringThresholdV),
                                     afs.RefractoryPeriods,
                                     rand.NextRangedUniformDouble(MinInitialVRatio, MaxInitialVRatio)
                                     );
    }
    if (settingsType == typeof(AFAnalogSincSettings))
    {
        return new AFAnalogSinc();
    }
    if (settingsType == typeof(AFAnalogSinusoidSettings))
    {
        return new AFAnalogSinusoid();
    }
    if (settingsType == typeof(AFAnalogSoftExponentialSettings))
    {
        AFAnalogSoftExponentialSettings afs = (AFAnalogSoftExponentialSettings)cfg;
        return new AFAnalogSoftExponential(rand.NextDouble(afs.Alpha));
    }
    if (settingsType == typeof(AFAnalogSoftMaxSettings))
    {
        return new AFAnalogSoftMax();
    }
    if (settingsType == typeof(AFAnalogSoftPlusSettings))
    {
        return new AFAnalogSoftPlus();
    }
    if (settingsType == typeof(AFAnalogSQNLSettings))
    {
        return new AFAnalogSQNL();
    }
    if (settingsType == typeof(AFAnalogTanHSettings))
    {
        return new AFAnalogTanH();
    }
    throw new ArgumentException($"Unsupported activation function configuration: {settingsType.Name}");
}