/// <summary>
/// Instantiates the appropriate A2S coder.
/// </summary>
/// <param name="cfg">The coder configuration.</param>
/// <returns>A new coder instance matching the exact configuration type.</returns>
/// <exception cref="ArgumentException">Thrown when the configuration type is not a known A2S coder configuration.</exception>
public static A2SCoderBase Create(RCNetBaseSettings cfg)
{
    Type type = cfg.GetType();
    if (type == typeof(A2SCoderGaussianReceptorsSettings))
    {
        return new A2SCoderGaussianReceptors((A2SCoderGaussianReceptorsSettings)cfg);
    }
    else if (type == typeof(A2SCoderSignalStrengthSettings))
    {
        return new A2SCoderSignalStrength((A2SCoderSignalStrengthSettings)cfg);
    }
    else if (type == typeof(A2SCoderUpDirArrowsSettings))
    {
        return new A2SCoderUpDirArrows((A2SCoderUpDirArrowsSettings)cfg);
    }
    else if (type == typeof(A2SCoderDownDirArrowsSettings))
    {
        return new A2SCoderDownDirArrows((A2SCoderDownDirArrowsSettings)cfg);
    }
    else
    {
        //Fix: the exception previously reported parameter name "settings" but the parameter is named "cfg"
        throw new ArgumentException($"Unexpected A2S coder type {type.Name}", nameof(cfg));
    }
}
/// <summary>
/// Checks whether the specified configuration is an existing type of A2S coder configuration.
/// </summary>
/// <param name="cfg">The coder configuration.</param>
/// <returns>True when the configuration's exact type is one of the supported A2S coder configurations, otherwise false.</returns>
public static bool CheckSettings(RCNetBaseSettings cfg)
{
    Type cfgType = cfg.GetType();
    //Exact type comparison - derived types are intentionally not accepted
    return cfgType == typeof(A2SCoderGaussianReceptorsSettings)
           || cfgType == typeof(A2SCoderSignalStrengthSettings)
           || cfgType == typeof(A2SCoderUpDirArrowsSettings)
           || cfgType == typeof(A2SCoderDownDirArrowsSettings);
}
//Constructor
/// <summary>
/// Creates initialized instance from the specified xml file.
/// </summary>
/// <param name="fileName">The name of the xml file consisting of demo cases configurations.</param>
public SMDemoSettings(string fileName)
{
    //Prepare the validator holding both required schemas
    DocValidator validator = new DocValidator();
    //RCNetTypes.xsd
    validator.AddSchema(RCNetBaseSettings.LoadRCNetTypesSchema());
    //SMDemoSettings.xsd is stored as an embedded resource of this assembly
    Assembly assembly = Assembly.GetExecutingAssembly();
    using (Stream schemaStream = assembly.GetManifestResourceStream("DemoConsoleApp.SMDemoSettings.xsd"))
    {
        validator.AddSchema(schemaStream);
    }
    //Validate and load the xml document
    XDocument xmlDoc = validator.LoadXDocFromFile(fileName);
    XElement rootElem = xmlDoc.Elements("demo").First();
    //Data folder attribute
    DataFolder = rootElem.Attribute("dataFolder").Value;
    //Demo case definitions
    CaseCfgCollection = new List<CaseSettings>();
    foreach (XElement caseElem in rootElem.Elements("case"))
    {
        CaseCfgCollection.Add(new CaseSettings(caseElem, DataFolder));
    }
}
//Constructors
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="numOfNeurons">Number of hidden layer neurons.</param>
/// <param name="activationCfg">Layer activation configuration (deep-cloned).</param>
public HiddenLayerSettings(int numOfNeurons, RCNetBaseSettings activationCfg)
{
    NumOfNeurons = numOfNeurons;
    //Keep an independent copy of the activation configuration
    ActivationCfg = ActivationFactory.DeepCloneActivationSettings(activationCfg);
    Check();
}
/// <summary>
/// Instantiates generator of proper type according to settings.
/// </summary>
/// <param name="settings">Generator configuration.</param>
/// <returns>A new generator instance matching the exact configuration type.</returns>
/// <exception cref="ArgumentException">Thrown when the configuration type is not a known generator configuration.</exception>
public static IGenerator Create(RCNetBaseSettings settings)
{
    Type cfgType = settings.GetType();
    if (cfgType == typeof(PulseGeneratorSettings))
    {
        return new PulseGenerator((PulseGeneratorSettings)settings);
    }
    else if (cfgType == typeof(RandomValueSettings))
    {
        return new RandomGenerator((RandomValueSettings)settings);
    }
    else if (cfgType == typeof(SinusoidalGeneratorSettings))
    {
        return new SinusoidalGenerator((SinusoidalGeneratorSettings)settings);
    }
    else if (cfgType == typeof(MackeyGlassGeneratorSettings))
    {
        return new MackeyGlassGenerator((MackeyGlassGeneratorSettings)settings);
    }
    else
    {
        //Fix: the message previously said "transformer configuration" although this factory creates generators
        throw new ArgumentException($"Unexpected generator configuration {cfgType.Name}", nameof(settings));
    }
}
/// <summary>
/// The deep copy constructor.
/// </summary>
/// <param name="source">Source instance whose configuration is deep-cloned.</param>
public FeedForwardNetworkSettings(FeedForwardNetworkSettings source)
{
    //Deep-clone every component so the copy is fully independent of the source
    OutputActivationCfg = ActivationFactory.DeepCloneActivationSettings(source.OutputActivationCfg);
    OutputRange = source.OutputRange.DeepClone();
    HiddenLayersCfg = (HiddenLayersSettings)source.HiddenLayersCfg.DeepClone();
    TrainerCfg = source.TrainerCfg.DeepClone();
}
//Static methods
/// <summary>
/// Function tests if specified activation can be used in FF network.
/// </summary>
/// <param name="activationSettings">Activation settings.</param>
/// <param name="outputRange">Returned range of the activation function.</param>
/// <returns>True when the activation is stateless and supports the derivative, otherwise false.</returns>
public static bool IsAllowedActivation(RCNetBaseSettings activationSettings, out Interval outputRange)
{
    outputRange = ActivationFactory.GetInfo(activationSettings, out bool stateless, out bool supportsDerivative);
    //FF network training requires a stateless activation with an available derivative
    return stateless && supportsDerivative;
}
/// <summary>
/// Collects basic information about activation function corresponding to given configuration.
/// </summary>
/// <param name="activationSettings">Activation function settings.</param>
/// <param name="stateless">Indicates whether the activation function is stateless.</param>
/// <param name="supportsDerivative">Indicates whether the activation function supports derivative.</param>
/// <returns>Output range of the activation function (deep clone).</returns>
public static Interval GetInfo(RCNetBaseSettings activationSettings, out bool stateless, out bool supportsDerivative)
{
    //Probe a throwaway instance to read its metadata
    IActivationFunction af = Create(activationSettings, new Random());
    stateless = af.Stateless;
    supportsDerivative = af.SupportsDerivative;
    return af.OutputRange.DeepClone();
}
/// <summary>
/// Creates an initialized instance from given xml element.
/// </summary>
/// <param name="elem">Xml element containing the settings.</param>
public HiddenLayerSettings(XElement elem)
{
    //Validate against the xsd type and obtain the settings element
    XElement settingsElem = Validate(elem, XsdTypeName);
    //Number of neurons
    NumOfNeurons = int.Parse(settingsElem.Attribute("neurons").Value);
    //Activation is the first child element
    ActivationCfg = ActivationFactory.LoadSettings(settingsElem.Elements().First());
    Check();
}
/// <summary>
/// Returns collection of names of the fields associated with the transformed field.
/// </summary>
/// <param name="settings">Transformed field configuration.</param>
/// <returns>List of associated input field names (one for unary transformers, two for binary ones).</returns>
/// <exception cref="ArgumentException">Thrown when the configuration type is not a known transformer configuration.</exception>
public static List<string> GetAssociatedNames(RCNetBaseSettings settings)
{
    Type cfgType = settings.GetType();
    //Exact type comparison drives which field name(s) apply
    string[] fields;
    if (cfgType == typeof(DiffTransformerSettings))
    {
        fields = new string[] { ((DiffTransformerSettings)settings).InputFieldName };
    }
    else if (cfgType == typeof(CDivTransformerSettings))
    {
        fields = new string[] { ((CDivTransformerSettings)settings).InputFieldName };
    }
    else if (cfgType == typeof(LogTransformerSettings))
    {
        fields = new string[] { ((LogTransformerSettings)settings).InputFieldName };
    }
    else if (cfgType == typeof(ExpTransformerSettings))
    {
        fields = new string[] { ((ExpTransformerSettings)settings).InputFieldName };
    }
    else if (cfgType == typeof(PowerTransformerSettings))
    {
        fields = new string[] { ((PowerTransformerSettings)settings).InputFieldName };
    }
    else if (cfgType == typeof(YeoJohnsonTransformerSettings))
    {
        fields = new string[] { ((YeoJohnsonTransformerSettings)settings).InputFieldName };
    }
    else if (cfgType == typeof(MWStatTransformerSettings))
    {
        fields = new string[] { ((MWStatTransformerSettings)settings).InputFieldName };
    }
    else if (cfgType == typeof(MulTransformerSettings))
    {
        //Binary transformer - two associated fields
        fields = new string[] { ((MulTransformerSettings)settings).XInputFieldName, ((MulTransformerSettings)settings).YInputFieldName };
    }
    else if (cfgType == typeof(DivTransformerSettings))
    {
        fields = new string[] { ((DivTransformerSettings)settings).XInputFieldName, ((DivTransformerSettings)settings).YInputFieldName };
    }
    else if (cfgType == typeof(LinearTransformerSettings))
    {
        fields = new string[] { ((LinearTransformerSettings)settings).XInputFieldName, ((LinearTransformerSettings)settings).YInputFieldName };
    }
    else
    {
        throw new ArgumentException($"Unexpected transformer configuration {cfgType.Name}", "settings");
    }
    return new List<string>(fields);
}
//Constructors
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="outputActivationCfg">Configuration of the output layer activation function.</param>
/// <param name="hiddenLayersCfg">The configuration of the hidden layers. Hidden layers are optional.</param>
/// <param name="trainerCfg">The configuration of the associated trainer.</param>
public FeedForwardNetworkSettings(IActivationSettings outputActivationCfg,
                                  HiddenLayersSettings hiddenLayersCfg,
                                  RCNetBaseSettings trainerCfg
                                  )
{
    OutputActivationCfg = (IActivationSettings)outputActivationCfg.DeepClone();
    //Missing hidden layers configuration means no hidden layers
    if (hiddenLayersCfg == null)
    {
        HiddenLayersCfg = new HiddenLayersSettings();
    }
    else
    {
        HiddenLayersCfg = (HiddenLayersSettings)hiddenLayersCfg.DeepClone();
    }
    TrainerCfg = trainerCfg.DeepClone();
    Check();
}
//Constructors
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="outputActivationCfg">Output layer activation configuration.</param>
/// <param name="hiddenLayersCfg">Hidden layers configuration. Hidden layers are optional.</param>
/// <param name="trainerCfg">Configuration of associated trainer.</param>
public FeedForwardNetworkSettings(RCNetBaseSettings outputActivationCfg,
                                  HiddenLayersSettings hiddenLayersCfg,
                                  RCNetBaseSettings trainerCfg
                                  )
{
    OutputActivationCfg = ActivationFactory.DeepCloneActivationSettings(outputActivationCfg);
    //Derive the output range from the activation; stateless/derivative flags are not needed here
    OutputRange = ActivationFactory.GetInfo(OutputActivationCfg, out _, out _);
    //Missing hidden layers configuration means no hidden layers
    if (hiddenLayersCfg == null)
    {
        HiddenLayersCfg = new HiddenLayersSettings();
    }
    else
    {
        HiddenLayersCfg = (HiddenLayersSettings)hiddenLayersCfg.DeepClone();
    }
    TrainerCfg = trainerCfg.DeepClone();
    Check();
}
//Constructors
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="name">The name of the generated field.</param>
/// <param name="generatorCfg">The configuration of an associated generator.</param>
/// <param name="routeToReadout">Specifies whether to route the generated field to the readout layer.</param>
/// <param name="featureFilterCfg">The configuration of the real feature filter (null means no filter).</param>
public GeneratedFieldSettings(string name,
                              RCNetBaseSettings generatorCfg,
                              bool routeToReadout = DefaultRouteToReadout,
                              RealFeatureFilterSettings featureFilterCfg = null
                              )
{
    Name = name;
    GeneratorCfg = generatorCfg.DeepClone();
    RouteToReadout = routeToReadout;
    //Feature filter is optional
    if (featureFilterCfg == null)
    {
        FeatureFilterCfg = null;
    }
    else
    {
        FeatureFilterCfg = (RealFeatureFilterSettings)featureFilterCfg.DeepClone();
    }
    Check();
}
/// <summary>
/// Instantiates transformer of proper type according to settings.
/// </summary>
/// <param name="fieldNames">Collection of names of all available input fields.</param>
/// <param name="settings">Transformer configuration.</param>
/// <returns>A new transformer instance matching the exact configuration type.</returns>
/// <exception cref="ArgumentException">Thrown when the configuration type is not a known transformer configuration.</exception>
public static ITransformer Create(List<string> fieldNames, RCNetBaseSettings settings)
{
    //Dispatch on the exact configuration type
    Type settingsType = settings.GetType();
    if (settingsType == typeof(DiffTransformerSettings))
    {
        return new DiffTransformer(fieldNames, (DiffTransformerSettings)settings);
    }
    else if (settingsType == typeof(CDivTransformerSettings))
    {
        return new CDivTransformer(fieldNames, (CDivTransformerSettings)settings);
    }
    else if (settingsType == typeof(LogTransformerSettings))
    {
        return new LogTransformer(fieldNames, (LogTransformerSettings)settings);
    }
    else if (settingsType == typeof(ExpTransformerSettings))
    {
        return new ExpTransformer(fieldNames, (ExpTransformerSettings)settings);
    }
    else if (settingsType == typeof(PowerTransformerSettings))
    {
        return new PowerTransformer(fieldNames, (PowerTransformerSettings)settings);
    }
    else if (settingsType == typeof(YeoJohnsonTransformerSettings))
    {
        return new YeoJohnsonTransformer(fieldNames, (YeoJohnsonTransformerSettings)settings);
    }
    else if (settingsType == typeof(MWStatTransformerSettings))
    {
        return new MWStatTransformer(fieldNames, (MWStatTransformerSettings)settings);
    }
    else if (settingsType == typeof(MulTransformerSettings))
    {
        return new MulTransformer(fieldNames, (MulTransformerSettings)settings);
    }
    else if (settingsType == typeof(DivTransformerSettings))
    {
        return new DivTransformer(fieldNames, (DivTransformerSettings)settings);
    }
    else if (settingsType == typeof(LinearTransformerSettings))
    {
        return new LinearTransformer(fieldNames, (LinearTransformerSettings)settings);
    }
    else
    {
        throw new ArgumentException($"Unexpected transformer configuration {settingsType.Name}", "settings");
    }
}
/// <summary>
/// Creates configuration of group of spiking neurons having specified spiking activation.
/// </summary>
/// <param name="activationCfg">Activation function configuration.</param>
/// <param name="heCfg">Configuration of the homogenous excitability.</param>
/// <param name="steadyBias">Constant bias (0 means bias is not required).</param>
/// <returns>The new spiking neuron group configuration.</returns>
private SpikingNeuronGroupSettings CreateSpikingGroup(RCNetBaseSettings activationCfg, HomogenousExcitabilitySettings heCfg, double steadyBias = 0d)
{
    //A zero steady bias means no bias configuration at all
    RandomValueSettings biasCfg = (steadyBias == 0) ? null : new RandomValueSettings(steadyBias, steadyBias);
    //Group takes the whole pool share (1d) and uses no predictors configuration
    return new SpikingNeuronGroupSettings(GetNeuronGroupName(activationCfg),
                                          1d,
                                          activationCfg,
                                          heCfg,
                                          biasCfg,
                                          null
                                          );
}
//Constructors
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="name">Transformed field name.</param>
/// <param name="transformerCfg">Configuration of associated transformer.</param>
/// <param name="routeToReadout">Specifies whether to route transformed field to readout layer together with other predictors.</param>
/// <param name="featureFilterCfg">Configuration of real feature filter (null means no filter).</param>
/// <param name="spikingCodingCfg">Configuration of spiking coding neurons (null means no spiking coding).</param>
public TransformedFieldSettings(string name,
                               RCNetBaseSettings transformerCfg,
                               bool routeToReadout = DefaultRouteToReadout,
                               RealFeatureFilterSettings featureFilterCfg = null,
                               SpikeCodeSettings spikingCodingCfg = null
                               )
{
    Name = name;
    TransformerCfg = transformerCfg.DeepClone();
    RouteToReadout = routeToReadout;
    //Both the feature filter and the spiking coding are optional
    if (featureFilterCfg == null)
    {
        FeatureFilterCfg = null;
    }
    else
    {
        FeatureFilterCfg = (RealFeatureFilterSettings)featureFilterCfg.DeepClone();
    }
    if (spikingCodingCfg == null)
    {
        SpikingCodingCfg = null;
    }
    else
    {
        SpikingCodingCfg = (SpikeCodeSettings)spikingCodingCfg.DeepClone();
    }
    Check();
}
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="elem">Xml element containing the initialization settings.</param>
public FeedForwardNetworkSettings(XElement elem)
{
    //Validate against the xsd type and obtain the settings element
    XElement settingsElem = Validate(elem, XsdTypeName);
    //Output activation is the first child element; derive its output range
    OutputActivationCfg = ActivationFactory.LoadSettings(settingsElem.Elements().First());
    OutputRange = ActivationFactory.GetInfo(OutputActivationCfg, out _, out _);
    //Hidden layers element is optional
    XElement hiddenLayersElem = settingsElem.Elements("hiddenLayers").FirstOrDefault();
    HiddenLayersCfg = hiddenLayersElem == null ? new HiddenLayersSettings() : new HiddenLayersSettings(hiddenLayersElem);
    //Trainer configuration - the first recognized trainer element wins
    TrainerCfg = null;
    foreach (XElement candidate in settingsElem.Elements())
    {
        switch (candidate.Name.LocalName)
        {
            case "qrdRegrTrainer":
                TrainerCfg = new QRDRegrTrainerSettings(candidate);
                break;
            case "ridgeRegrTrainer":
                TrainerCfg = new RidgeRegrTrainerSettings(candidate);
                break;
            case "elasticRegrTrainer":
                TrainerCfg = new ElasticRegrTrainerSettings(candidate);
                break;
            case "resPropTrainer":
                TrainerCfg = new RPropTrainerSettings(candidate);
                break;
        }
        if (TrainerCfg != null)
        {
            break;
        }
    }
    //NOTE(review): TrainerCfg stays null when no trainer element is present - presumably Check() rejects that; verify
    Check();
}
//Constructors
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="name">Name of the neuron group.</param>
/// <param name="relShare">Specifies how big relative portion of pool's neurons is formed by this group of the neurons.</param>
/// <param name="activationCfg">Common activation function settings of the groupped neurons.</param>
/// <param name="homogenousExcitabilityCfg">Configuration of the neuron's homogenous excitability (null means defaults).</param>
/// <param name="biasCfg">Each neuron within the group receives constant input bias. Value of the neuron's bias is driven by this random settings (null means no bias).</param>
/// <param name="predictorsCfg">Configuration of the predictors (null means no configuration).</param>
public SpikingNeuronGroupSettings(string name,
                                  double relShare,
                                  RCNetBaseSettings activationCfg,
                                  HomogenousExcitabilitySettings homogenousExcitabilityCfg = null,
                                  RandomValueSettings biasCfg = null,
                                  PredictorsSettings predictorsCfg = null
                                  )
{
    Name = name;
    RelShare = relShare;
    ActivationCfg = activationCfg.DeepClone();
    //Missing excitability configuration falls back to defaults; the remaining optionals stay null
    if (homogenousExcitabilityCfg == null)
    {
        HomogenousExcitabilityCfg = new HomogenousExcitabilitySettings();
    }
    else
    {
        HomogenousExcitabilityCfg = (HomogenousExcitabilitySettings)homogenousExcitabilityCfg.DeepClone();
    }
    BiasCfg = biasCfg == null ? null : (RandomValueSettings)biasCfg.DeepClone();
    PredictorsCfg = predictorsCfg == null ? null : (PredictorsSettings)predictorsCfg.DeepClone();
    Check();
}
//Constructors
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="name">Name of the neuron group.</param>
/// <param name="relShare">Specifies how big relative portion of pool's neurons is formed by this group of the neurons.</param>
/// <param name="activationCfg">Common activation function settings of the groupped neurons.</param>
/// <param name="firingThreshold">
/// A number between 0 and 1 (LT1). Every time the new normalized activation value is higher than the previous
/// normalized activation value by at least the threshold, it is evaluated as a firing event.
/// </param>
/// <param name="thresholdMaxRefDeepness">Maximum deepness of historical normalized activation value to be compared with current normalized activation value when evaluating firing event.</param>
/// <param name="biasCfg">Each neuron within the group receives constant input bias. Value of the neuron's bias is driven by this random settings (null means no bias).</param>
/// <param name="retainmentCfg">Neurons' retainment property configuration (null means no retainment).</param>
/// <param name="predictorsCfg">Configuration of the predictors (null means no configuration).</param>
public AnalogNeuronGroupSettings(string name,
                                 double relShare,
                                 RCNetBaseSettings activationCfg,
                                 double firingThreshold = DefaultFiringThreshold,
                                 int thresholdMaxRefDeepness = DefaultThresholdMaxRefDeepness,
                                 RandomValueSettings biasCfg = null,
                                 RetainmentSettings retainmentCfg = null,
                                 PredictorsSettings predictorsCfg = null
                                 )
{
    Name = name;
    RelShare = relShare;
    ActivationCfg = activationCfg.DeepClone();
    FiringThreshold = firingThreshold;
    ThresholdMaxRefDeepness = thresholdMaxRefDeepness;
    //Optional components stay null when not provided, otherwise keep independent copies
    if (biasCfg != null)
    {
        BiasCfg = (RandomValueSettings)biasCfg.DeepClone();
    }
    else
    {
        BiasCfg = null;
    }
    if (retainmentCfg != null)
    {
        RetainmentCfg = (RetainmentSettings)retainmentCfg.DeepClone();
    }
    else
    {
        RetainmentCfg = null;
    }
    if (predictorsCfg != null)
    {
        PredictorsCfg = (PredictorsSettings)predictorsCfg.DeepClone();
    }
    else
    {
        PredictorsCfg = null;
    }
    Check();
}
/// <summary>
/// Creates configuration of group of analog neurons having specified analog activation.
/// </summary>
/// <param name="activationCfg">Activation function configuration.</param>
/// <param name="maxAbsBias">Maximum absolute value of the bias (0 means bias is not required).</param>
/// <param name="maxRetainmentStrength">Maximum retainment strength (0 means retainment property is not required).</param>
/// <returns>The new analog neuron group configuration.</returns>
private AnalogNeuronGroupSettings CreateAnalogGroup(RCNetBaseSettings activationCfg,
                                                    double maxAbsBias = 0d,
                                                    double maxRetainmentStrength = 0d
                                                    )
{
    //Zero maxAbsBias means no bias configuration; otherwise bias is drawn from (-maxAbsBias, maxAbsBias)
    RandomValueSettings biasCfg = (maxAbsBias == 0) ? null : new RandomValueSettings(-maxAbsBias, maxAbsBias);
    //Zero maxRetainmentStrength means no retainment; otherwise strength is drawn from (0, maxRetainmentStrength)
    const double RetainmentDensity = 1d;
    RetainmentSettings retainmentCfg = (maxRetainmentStrength == 0)
                                       ? null
                                       : new RetainmentSettings(RetainmentDensity, new URandomValueSettings(0, maxRetainmentStrength));
    //Group takes the whole pool share (1d), uses default firing parameters and no predictors configuration
    return new AnalogNeuronGroupSettings(GetNeuronGroupName(activationCfg),
                                         1d,
                                         activationCfg,
                                         AnalogNeuronGroupSettings.DefaultFiringThreshold,
                                         AnalogNeuronGroupSettings.DefaultThresholdMaxRefDeepness,
                                         biasCfg,
                                         retainmentCfg,
                                         null
                                         );
}
/// <summary>
/// Creates configuration of single output layer FF network structure with associated resilient back propagation trainer.
/// </summary>
/// <param name="aFnCfg">Activation of output layer.</param>
/// <param name="numOfAttempts">Number of regression attempts. Each readout network will try to learn numOfAttempts times.</param>
/// <param name="numOfEpochs">Number of training epochs within an attempt.</param>
/// <returns>The new FF network configuration with no hidden layers.</returns>
public static FeedForwardNetworkSettings CreateSingleLayerRegrNet(RCNetBaseSettings aFnCfg, int numOfAttempts, int numOfEpochs)
{
    //No hidden layers (null) - output layer only
    RPropTrainerSettings trainerCfg = new RPropTrainerSettings(numOfAttempts, numOfEpochs);
    return new FeedForwardNetworkSettings(aFnCfg, null, trainerCfg);
}
/// <summary>
/// Returns the deep clone of the activation function settings.
/// </summary>
/// <param name="settings">Specific activation function settings.</param>
/// <returns>The deep clone of the settings, preserving the concrete settings type.</returns>
/// <exception cref="ArgumentException">Thrown when the settings type is not a supported activation function settings type.</exception>
public static RCNetBaseSettings DeepCloneActivationSettings(RCNetBaseSettings settings)
{
    Type settingsType = settings.GetType();
    if (settingsType == typeof(AdExpIFSettings))
    {
        return ((AdExpIFSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(SQNLSettings))
    {
        return ((SQNLSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(BentIdentitySettings))
    {
        return ((BentIdentitySettings)settings).DeepClone();
    }
    else if (settingsType == typeof(ElliotSettings))
    {
        return ((ElliotSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(ExpIFSettings))
    {
        return ((ExpIFSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(GaussianSettings))
    {
        return ((GaussianSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(IdentitySettings))
    {
        return ((IdentitySettings)settings).DeepClone();
    }
    else if (settingsType == typeof(ISRUSettings))
    {
        return ((ISRUSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(IzhikevichIFSettings))
    {
        return ((IzhikevichIFSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(AutoIzhikevichIFSettings))
    {
        return ((AutoIzhikevichIFSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(LeakyIFSettings))
    {
        return ((LeakyIFSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(LeakyReLUSettings))
    {
        //Fix: this branch was duplicated in the original code; the second copy was unreachable dead code and has been removed
        return ((LeakyReLUSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(SigmoidSettings))
    {
        return ((SigmoidSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(SimpleIFSettings))
    {
        return ((SimpleIFSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(SincSettings))
    {
        return ((SincSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(SinusoidSettings))
    {
        return ((SinusoidSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(SoftExponentialSettings))
    {
        return ((SoftExponentialSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(SoftPlusSettings))
    {
        return ((SoftPlusSettings)settings).DeepClone();
    }
    else if (settingsType == typeof(TanHSettings))
    {
        return ((TanHSettings)settings).DeepClone();
    }
    else
    {
        throw new ArgumentException($"Unsupported activation function settings: {settingsType.Name}");
    }
}
/// <summary>
/// Creates an instance of the activation function according to given settings.
/// </summary>
/// <param name="settings">Specific activation function settings.</param>
/// <param name="rand">Random object to be used for randomly generated parameters.</param>
/// <returns>The new activation function instance.</returns>
public static IActivationFunction Create(RCNetBaseSettings settings, Random rand)
{
    IActivationFunction af;
    //Dispatch on the exact settings type.
    //NOTE(review): rand.NextDouble(x) appears to be a project extension sampling within a configured range - confirm against its definition.
    Type settingsType = settings.GetType();
    if (settingsType == typeof(AdExpIFSettings))
    {
        //Adaptive exponential integrate-and-fire
        AdExpIFSettings afs = (AdExpIFSettings)settings;
        af = new AdExpIF(rand.NextDouble(afs.TimeScale),
                         rand.NextDouble(afs.Resistance),
                         rand.NextDouble(afs.RestV),
                         rand.NextDouble(afs.ResetV),
                         rand.NextDouble(afs.RheobaseV),
                         rand.NextDouble(afs.FiringThresholdV),
                         rand.NextDouble(afs.SharpnessDeltaT),
                         rand.NextDouble(afs.AdaptationVoltageCoupling),
                         rand.NextDouble(afs.AdaptationTimeConstant),
                         rand.NextDouble(afs.AdaptationSpikeTriggeredIncrement),
                         afs.SolverMethod,
                         afs.SolverCompSteps,
                         afs.StimuliDuration
                         );
    }
    else if (settingsType == typeof(BentIdentitySettings))
    {
        //Parameterless analog activations are constructed directly
        af = new BentIdentity();
    }
    else if (settingsType == typeof(ElliotSettings))
    {
        ElliotSettings afs = (ElliotSettings)settings;
        af = new Elliot(rand.NextDouble(afs.Slope));
    }
    else if (settingsType == typeof(ExpIFSettings))
    {
        //Exponential integrate-and-fire
        ExpIFSettings afs = (ExpIFSettings)settings;
        af = new ExpIF(rand.NextDouble(afs.TimeScale),
                       rand.NextDouble(afs.Resistance),
                       rand.NextDouble(afs.RestV),
                       rand.NextDouble(afs.ResetV),
                       rand.NextDouble(afs.RheobaseV),
                       rand.NextDouble(afs.FiringThresholdV),
                       rand.NextDouble(afs.SharpnessDeltaT),
                       afs.RefractoryPeriods,
                       afs.SolverMethod,
                       afs.SolverCompSteps,
                       afs.StimuliDuration
                       );
    }
    else if (settingsType == typeof(GaussianSettings))
    {
        af = new Gaussian();
    }
    else if (settingsType == typeof(IdentitySettings))
    {
        af = new Identity();
    }
    else if (settingsType == typeof(ISRUSettings))
    {
        ISRUSettings afs = (ISRUSettings)settings;
        af = new ISRU(rand.NextDouble(afs.Alpha));
    }
    else if (settingsType == typeof(IzhikevichIFSettings))
    {
        //Izhikevich integrate-and-fire with explicitly configured parameters
        IzhikevichIFSettings afs = (IzhikevichIFSettings)settings;
        af = new IzhikevichIF(rand.NextDouble(afs.RecoveryTimeScale),
                              rand.NextDouble(afs.RecoverySensitivity),
                              rand.NextDouble(afs.RecoveryReset),
                              rand.NextDouble(afs.RestV),
                              rand.NextDouble(afs.ResetV),
                              rand.NextDouble(afs.FiringThresholdV),
                              afs.RefractoryPeriods,
                              afs.SolverMethod,
                              afs.SolverCompSteps,
                              afs.StimuliDuration
                              );
    }
    else if (settingsType == typeof(AutoIzhikevichIFSettings))
    {
        //Izhikevich integrate-and-fire with automatically derived parameters.
        //A single squared random value drives two of the constants; the remaining constants are fixed.
        //NOTE(review): the fixed constants look like the canonical Izhikevich regular-spiking setup - confirm against the model reference.
        double randomValue = rand.NextDouble().Power(2);
        AutoIzhikevichIFSettings afs = (AutoIzhikevichIFSettings)settings;
        //Ranges
        af = new IzhikevichIF(0.02,
                              0.2,
                              8 + (-6 * randomValue),
                              -70,
                              -65 + (15 * randomValue),
                              30,
                              afs.RefractoryPeriods,
                              afs.SolverMethod,
                              afs.SolverCompSteps,
                              afs.StimuliDuration
                              );
    }
    else if (settingsType == typeof(LeakyIFSettings))
    {
        //Leaky integrate-and-fire
        LeakyIFSettings afs = (LeakyIFSettings)settings;
        af = new LeakyIF(rand.NextDouble(afs.TimeScale),
                         rand.NextDouble(afs.Resistance),
                         rand.NextDouble(afs.RestV),
                         rand.NextDouble(afs.ResetV),
                         rand.NextDouble(afs.FiringThresholdV),
                         afs.RefractoryPeriods,
                         afs.SolverMethod,
                         afs.SolverCompSteps,
                         afs.StimuliDuration
                         );
    }
    else if (settingsType == typeof(LeakyReLUSettings))
    {
        LeakyReLUSettings afs = (LeakyReLUSettings)settings;
        af = new LeakyReLU(rand.NextDouble(afs.NegSlope));
    }
    else if (settingsType == typeof(SigmoidSettings))
    {
        af = new Sigmoid();
    }
    else if (settingsType == typeof(SimpleIFSettings))
    {
        //Simple integrate-and-fire
        SimpleIFSettings afs = (SimpleIFSettings)settings;
        af = new SimpleIF(rand.NextDouble(afs.Resistance),
                          rand.NextDouble(afs.DecayRate),
                          rand.NextDouble(afs.ResetV),
                          rand.NextDouble(afs.FiringThresholdV),
                          afs.RefractoryPeriods
                          );
    }
    else if (settingsType == typeof(SincSettings))
    {
        af = new Sinc();
    }
    else if (settingsType == typeof(SinusoidSettings))
    {
        af = new Sinusoid();
    }
    else if (settingsType == typeof(SoftExponentialSettings))
    {
        SoftExponentialSettings afs = (SoftExponentialSettings)settings;
        af = new SoftExponential(rand.NextDouble(afs.Alpha));
    }
    else if (settingsType == typeof(SoftPlusSettings))
    {
        af = new SoftPlus();
    }
    else if (settingsType == typeof(SQNLSettings))
    {
        af = new SQNL();
    }
    else if (settingsType == typeof(TanHSettings))
    {
        af = new TanH();
    }
    else
    {
        throw new ArgumentException($"Unsupported activation function settings: {settingsType.Name}");
    }
    //Set random initial membrane potential for spiking activation
    if (!af.Stateless && af.TypeOfActivation == ActivationType.Spiking)
    {
        af.SetInitialInternalState(rand.NextRangedUniformDouble(0.05, 0.95));
    }
    return (af);
}
/// <summary>
/// Creates StateMachine configuration following pure LSM design.
/// </summary>
/// <param name="proportionsCfg">LSM pool proportions.</param>
/// <param name="aFnCfg">Spiking activation function configuration.</param>
/// <param name="hes">Homogenous excitability configuration.</param>
/// <param name="inputConnectionDensity">Density of the input field connections to hidden neurons.</param>
/// <param name="maxInputDelay">Maximum delay of input synapse.</param>
/// <param name="interconnectionDensity">Density of the hidden neurons interconnection.</param>
/// <param name="maxInternalDelay">Maximum delay of internal synapse.</param>
/// <param name="steadyBias">Constant bias (0 means bias is not required).</param>
/// <param name="predictorsParamsCfg">Predictors parameters (use null for defaults).</param>
/// <param name="allowedPredictor">Allowed predictor(s).</param>
/// <returns>The complete StateMachine configuration.</returns>
/// <exception cref="ArgumentException">Thrown when the specified activation is not spiking.</exception>
public StateMachineSettings CreatePureLSMCfg(ProportionsSettings proportionsCfg,
                                             RCNetBaseSettings aFnCfg,
                                             HomogenousExcitabilitySettings hes,
                                             double inputConnectionDensity,
                                             int maxInputDelay,
                                             double interconnectionDensity,
                                             int maxInternalDelay,
                                             double steadyBias,
                                             PredictorsParamsSettings predictorsParamsCfg,
                                             params PredictorsProvider.PredictorID[] allowedPredictor
                                             )
{
    //Activation check - pure LSM requires a spiking activation; a throwaway instance is created just to read its type
    if (ActivationFactory.Create(aFnCfg, new Random()).TypeOfActivation != ActivationType.Spiking)
    {
        throw new ArgumentException("Specified activation must be spiking.", "aFnCfg");
    }
    //One neuron group covering the whole pool
    SpikingNeuronGroupSettings grp = CreateSpikingGroup(aFnCfg, hes, steadyBias);
    //Simple spiking pool with random interconnection schema
    PoolSettings poolCfg = new PoolSettings(GetPoolName(ActivationContent.Spiking, 0),
                                            proportionsCfg,
                                            new NeuronGroupsSettings(grp),
                                            new InterconnSettings(new RandomSchemaSettings(interconnectionDensity, 0d, false, false))
                                            );
    //Simple reservoir structure holding the single pool
    ReservoirStructureSettings resStructCfg = new ReservoirStructureSettings(GetResStructName(ActivationContent.Spiking, 0),
                                                                             new PoolsSettings(poolCfg)
                                                                             );
    //Input connections configuration - one connection per external input field
    List<InputConnSettings> inputConns = new List<InputConnSettings>(InputCfg.VaryingFieldsCfg.ExternalFieldsCfg.FieldCfgCollection.Count);
    foreach (ExternalFieldSettings fieldCfg in InputCfg.VaryingFieldsCfg.ExternalFieldsCfg.FieldCfgCollection)
    {
        InputConnSettings inputConnCfg = new InputConnSettings(fieldCfg.Name,
                                                               poolCfg.Name,
                                                               inputConnectionDensity,
                                                               0
                                                               );
        inputConns.Add(inputConnCfg);
    }
    //Synapse general configuration - random delay method for input and both internal synapse polarities
    SynapseSTInputSettings synapseSTInputSettings = new SynapseSTInputSettings(Synapse.SynapticDelayMethod.Random, maxInputDelay);
    SynapseSTExcitatorySettings synapseSTExcitatorySettings = new SynapseSTExcitatorySettings(Synapse.SynapticDelayMethod.Random, maxInternalDelay);
    SynapseSTInhibitorySettings synapseSTInhibitorySettings = new SynapseSTInhibitorySettings(Synapse.SynapticDelayMethod.Random, maxInternalDelay);
    SynapseSTSettings synapseSTCfg = new SynapseSTSettings(synapseSTInputSettings, synapseSTExcitatorySettings, synapseSTInhibitorySettings);
    SynapseSettings synapseCfg = new SynapseSettings(synapseSTCfg, null);
    //Initially set all switches to false - all available predictors are forbidden
    bool[] predictorSwitches = new bool[PredictorsProvider.NumOfSupportedPredictors];
    predictorSwitches.Populate(false);
    //Enable specified predictors only
    foreach (PredictorsProvider.PredictorID predictorID in allowedPredictor)
    {
        predictorSwitches[(int)predictorID] = true;
    }
    //Create predictors configuration using default params
    PredictorsSettings predictorsCfg = new PredictorsSettings(predictorSwitches, predictorsParamsCfg);
    //Create reservoir instance tying structure, input connections, synapses and predictors together
    ReservoirInstanceSettings resInstCfg = new ReservoirInstanceSettings(GetResInstName(ResDesign.PureLSM, 0),
                                                                         resStructCfg.Name,
                                                                         new InputConnsSettings(inputConns),
                                                                         synapseCfg,
                                                                         predictorsCfg
                                                                         );
    //Build and return SM configuration (ReadoutCfg is taken from this instance's state)
    return (new StateMachineSettings(new NeuralPreprocessorSettings(InputCfg,
                                                                    new ReservoirStructuresSettings(resStructCfg),
                                                                    new ReservoirInstancesSettings(resInstCfg)
                                                                    ),
                                     ReadoutCfg
                                     ));
}
//Methods
/// <summary>
/// Builds name of the specified activation function.
/// </summary>
/// <param name="activationCfg">Activation function configuration.</param>
/// <returns>Name in the form "ActivationType-FunctionName".</returns>
private string GetActivationName(RCNetBaseSettings activationCfg)
{
    //A throwaway instance provides the activation type and the concrete function type name
    IActivationFunction aFn = ActivationFactory.Create(activationCfg, _rand);
    string trimmedTypeName = aFn.GetType().Name.Replace("Settings", string.Empty);
    return $"{aFn.TypeOfActivation}-{trimmedTypeName}";
}
/// <summary>
/// Copy constructor.
/// </summary>
/// <param name="source">Source instance whose configuration is deep-cloned.</param>
public HiddenLayerSettings(HiddenLayerSettings source)
{
    //Source instance is already validated, so Check() is not invoked here
    NumOfNeurons = source.NumOfNeurons;
    ActivationCfg = ActivationFactory.DeepCloneActivationSettings(source.ActivationCfg);
}
/// <summary>
/// Creates configuration of multi hidden layer FF network structure (Identity output) with associated resilient back propagation trainer.
/// </summary>
/// <param name="hiddenLayerSize">Number of hidden layer neurons.</param>
/// <param name="hiddenLayerAFnCfg">Activation of hidden layer.</param>
/// <param name="numOfHiddenLayers">Number of hidden layers.</param>
/// <param name="numOfAttempts">Number of regression attempts. Each readout network will try to learn numOfAttempts times.</param>
/// <param name="numOfEpochs">Number of training epochs within an attempt.</param>
/// <returns>The new FF network configuration.</returns>
public static FeedForwardNetworkSettings CreateMultiLayerRegrNet(int hiddenLayerSize, RCNetBaseSettings hiddenLayerAFnCfg, int numOfHiddenLayers, int numOfAttempts, int numOfEpochs)
{
    //All hidden layers share the same size and activation
    List<HiddenLayerSettings> layers = new List<HiddenLayerSettings>(numOfHiddenLayers);
    while (layers.Count < numOfHiddenLayers)
    {
        layers.Add(new HiddenLayerSettings(hiddenLayerSize, hiddenLayerAFnCfg));
    }
    return new FeedForwardNetworkSettings(new IdentitySettings(),
                                          new HiddenLayersSettings(layers),
                                          new RPropTrainerSettings(numOfAttempts, numOfEpochs));
}
/// <summary>
/// Builds name of neuron group.
/// </summary>
/// <param name="activationCfg">Activation function configuration.</param>
/// <returns>Group name in the form "Grp-ActivationName".</returns>
private string GetNeuronGroupName(RCNetBaseSettings activationCfg) => "Grp-" + GetActivationName(activationCfg);