/// <summary>
/// Creates the instance and initializes it from the given xml element.
/// This is the preferred way to instantiate feed forward network settings.
/// </summary>
/// <param name="elem">
/// Xml data containing feed forward network settings.
/// Content of xml element is always validated against the xml schema.
/// </param>
public FeedForwardNetworkSettings(XElement elem)
{
    //Validation
    ElemValidator validator = new ElemValidator();
    Assembly assemblyRCNet = Assembly.GetExecutingAssembly();
    validator.AddXsdFromResources(assemblyRCNet, "RCNet.Neural.Network.FF.FeedForwardNetworkSettings.xsd");
    validator.AddXsdFromResources(assemblyRCNet, "RCNet.RCNetTypes.xsd");
    XElement feedForwardNetworkSettingsElem = validator.Validate(elem, "rootElem");
    //Parsing
    OutputLayerActivation = new ActivationSettings(feedForwardNetworkSettingsElem.Descendants("outputActivation").First());
    if (!IsAllowedActivation(OutputLayerActivation))
    {
        throw new ApplicationException($"Activation {OutputLayerActivation.FunctionType} can't be used in FF network. Activation function has to be stateless and has to support derivative calculation.");
    }
    RegressionMethod = ParseTrainingMethodType(feedForwardNetworkSettingsElem.Attribute("regressionMethod").Value);
    //Hidden layers
    HiddenLayerCollection = new List<HiddenLayerSettings>();
    XElement hiddenLayersElem = feedForwardNetworkSettingsElem.Descendants("hiddenLayers").FirstOrDefault();
    if (hiddenLayersElem != null)
    {
        foreach (XElement layerElem in hiddenLayersElem.Descendants("layer"))
        {
            HiddenLayerCollection.Add(new HiddenLayerSettings(layerElem));
        }
    }
    //Trainers
    LinRegrTrainerCfg = null;
    RPropTrainerCfg = null;
    switch (RegressionMethod)
    {
        case TrainingMethodType.Linear:
            XElement linRegrTrainerElem = feedForwardNetworkSettingsElem.Descendants("linRegrTrainer").FirstOrDefault();
            if (linRegrTrainerElem != null)
            {
                LinRegrTrainerCfg = new LinRegrTrainerSettings(linRegrTrainerElem);
            }
            else
            {
                LinRegrTrainerCfg = new LinRegrTrainerSettings();
            }
            break;
        case TrainingMethodType.Resilient:
            XElement resPropTrainerElem = feedForwardNetworkSettingsElem.Descendants("resPropTrainer").FirstOrDefault();
            if (resPropTrainerElem != null)
            {
                RPropTrainerCfg = new RPropTrainerSettings(resPropTrainerElem);
            }
            else
            {
                RPropTrainerCfg = new RPropTrainerSettings();
            }
            break;
    }
    return;
}
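A minimal usage sketch of this constructor, assuming the settings element is loaded from an XML configuration file. The file name "ffNetCfg.xml" and the element name "feedForwardNetworkCfg" are illustrative assumptions, not names confirmed by the code above; the constructor validates whatever element it receives against the embedded XSDs.

// Hedged usage sketch (hypothetical file and element names).
using System;
using System.Linq;
using System.Xml.Linq;

XDocument cfgDoc = XDocument.Load("ffNetCfg.xml");
XElement ffNetElem = cfgDoc.Root.Descendants("feedForwardNetworkCfg").First();
FeedForwardNetworkSettings ffNetSettings = new FeedForwardNetworkSettings(ffNetElem);
Console.WriteLine($"Regression method: {ffNetSettings.RegressionMethod}, hidden layers: {ffNetSettings.HiddenLayerCollection.Count}");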
/// <summary>
/// Creates the instance and initializes it from given xml element.
/// </summary>
/// <param name="elem">
/// Xml data containing the settings.
/// </param>
public HiddenLayerSettings(XElement elem)
{
    NumOfNeurons = int.Parse(elem.Attribute("neurons").Value);
    Activation = new ActivationSettings(elem.Descendants("activation").First());
    if (!IsAllowedActivation(Activation))
    {
        throw new ApplicationException($"Activation {Activation.FunctionType} can't be used in FF network. Activation has to be time independent and has to support derivative calculation.");
    }
    return;
}
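For illustration, a sketch of the element shape this constructor reads: per the code above, only the "neurons" attribute and a nested "activation" element are required; the attributes on the activation element depend on ActivationSettings and are an assumption here.

// The "function" attribute on <activation> is assumed for illustration only.
XElement layerElem = XElement.Parse(
    "<layer neurons=\"10\">" +
    "<activation function=\"TanH\"/>" +
    "</layer>");
HiddenLayerSettings hiddenLayerCfg = new HiddenLayerSettings(layerElem);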
/// <summary>
/// Copy constructor
/// </summary>
/// <param name="source">Source instance</param>
public HiddenLayerSettings(HiddenLayerSettings source)
{
    NumOfNeurons = source.NumOfNeurons;
    Activation = null;
    if (source.Activation != null)
    {
        Activation = source.Activation.DeepClone();
    }
    return;
}
//Methods
//Static methods
/// <summary>
/// Function checks if the specified activation can be used in a FF network.
/// </summary>
/// <param name="activationSettings">Activation settings</param>
/// <returns>True if the activation is stateless and supports derivative calculation, false otherwise.</returns>
public static bool IsAllowedActivation(ActivationSettings activationSettings)
{
    IActivationFunction af = ActivationFactory.Create(activationSettings);
    if (!af.Stateless || !af.SupportsComputeDerivativeMethod)
    {
        return(false);
    }
    return(true);
}
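A brief usage sketch of this guard. The assumption is that it is invoked after an ActivationSettings instance has been parsed from XML, as in the constructors above; the unqualified call also assumes the caller lives in the same class as the guard, which the snippet does not show.

// Hedged sketch; "layerElem" stands for an XML layer element parsed elsewhere.
ActivationSettings activationCfg = new ActivationSettings(layerElem.Descendants("activation").First());
if (!IsAllowedActivation(activationCfg))
{
    //Stateful activations or activations without a derivative are rejected up front.
    throw new ApplicationException($"Activation {activationCfg.FunctionType} can't be used in FF network.");
}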
public Layer(int nCount, int index, ActivationSettings activationSettings)
{
    NCount = nCount;
    Index = index;
    ActivationType = activationSettings.Type();

    // Activation Setup
    switch (activationSettings.Type())
    {
        case EActivationType.Invalid:
            Activation = null;
            throw new ArgumentException("Activation Type Invalid.");
        case EActivationType.Arctan:
            Activation = new Arctan();
            break;
        case EActivationType.BinaryStep:
            Activation = new BinaryStep();
            break;
        case EActivationType.BipolarSigmoid:
            Activation = new BipolarSigmoid();
            break;
        case EActivationType.ELU:
            Activation = new ELU((ELUSettings)activationSettings);
            break;
        case EActivationType.HardSigmoid:
            Activation = new HardSigmoid();
            break;
        case EActivationType.HardTanh:
            Activation = new HardTanh();
            break;
        case EActivationType.Identity:
            Activation = new Identity();
            break;
        case EActivationType.Logit:
            Activation = new Logit();
            break;
        case EActivationType.LReLU:
            Activation = new LReLU((LReLUSettings)activationSettings);
            break;
        case EActivationType.Mish:
            Activation = new Mish();
            break;
        case EActivationType.ReLU:
            Activation = new ReLU();
            break;
        case EActivationType.SeLU:
            Activation = new SeLU();
            break;
        case EActivationType.Sigmoid:
            Activation = new Sigmoid();
            break;
        case EActivationType.Softmax:
            Activation = new Softmax();
            break;
        case EActivationType.Softplus:
            Activation = new Softplus();
            break;
        case EActivationType.Softsign:
            Activation = new Softsign();
            break;
        case EActivationType.Tanh:
            Activation = new Tanh();
            break;
        default:
            throw new ArgumentException("Activation Type Invalid.");
    }
}
public bool CreateLayer(int nCount, ELayerType type, ActivationSettings activationSettings)
{
    Layer.Utility.Layer layer;
    switch (type)
    {
        case ELayerType.Invalid:
            throw new ArgumentException("Invalid \"type\" argument.");
        case ELayerType.AveragePooling:
            layer = new AveragePooling(nCount, Layers.Count, activationSettings);
            Layers.Add(layer);
            return(true);
        case ELayerType.AverageUnpooling:
            layer = new AverageUnpooling(nCount, Layers.Count, activationSettings);
            Layers.Add(layer);
            return(true);
        case ELayerType.Convolutional:
            layer = new Convolutional(nCount, Layers.Count, activationSettings);
            Layers.Add(layer);
            return(true);
        case ELayerType.Deconvolutional:
            layer = new Deconvolutional(nCount, Layers.Count, activationSettings);
            Layers.Add(layer);
            return(true);
        case ELayerType.Dropout:
            layer = new Dropout(nCount, Layers.Count, activationSettings);
            Layers.Add(layer);
            return(true);
        case ELayerType.FullyConnected:
            layer = new FullyConnected(nCount, Layers.Count, activationSettings);
            Layers.Add(layer);
            return(true);
        case ELayerType.GatedRecurrent:
            layer = new GatedRecurrent(nCount, Layers.Count, activationSettings);
            Layers.Add(layer);
            return(true);
        case ELayerType.LSTM:
            layer = new LSTM(nCount, Layers.Count, activationSettings);
            Layers.Add(layer);
            return(true);
        case ELayerType.MaxPooling:
            layer = new MaxPooling(nCount, Layers.Count, activationSettings);
            Layers.Add(layer);
            return(true);
        case ELayerType.MaxUnpooling:
            layer = new MaxUnpooling(nCount, Layers.Count, activationSettings);
            Layers.Add(layer);
            return(true);
        case ELayerType.Recurrent:
            layer = new Recurrent(nCount, Layers.Count, activationSettings);
            Layers.Add(layer);
            return(true);
        default:
            throw new ArgumentException("Invalid \"type\" argument.");
    }
}
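A hedged usage sketch of CreateLayer. The owning class is not shown above, so "NeuralNetwork" is only a placeholder name, and the way ActivationSettings instances are obtained is likewise an assumption for illustration.

// Placeholder host type; the real class exposing CreateLayer/Layers is not shown above.
NeuralNetwork network = new NeuralNetwork();
// Assumed construction of the activation settings, for illustration only.
ActivationSettings reluSettings = new ActivationSettings(EActivationType.ReLU);
network.CreateLayer(128, ELayerType.FullyConnected, reluSettings);
network.CreateLayer(64, ELayerType.Dropout, reluSettings);
network.CreateLayer(10, ELayerType.FullyConnected, new ActivationSettings(EActivationType.Softmax));
Console.WriteLine($"Layers created: {network.Layers.Count}");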
//Constructors
/// <summary>
/// Creates an uninitialized instance
/// </summary>
public HiddenLayerSettings()
{
    NumOfNeurons = 0;
    Activation = null;
    return;
}
/// <summary>
/// Creates the instance and initializes it from the given xml element.
/// This is the preferred way to instantiate pool settings.
/// </summary>
/// <param name="elem">
/// Xml data containing pool settings.
/// Content of xml element is always validated against the xml schema.
/// </param>
public PoolSettings(XElement elem)
{
    //Validation
    ElemValidator validator = new ElemValidator();
    Assembly assemblyRCNet = Assembly.GetExecutingAssembly();
    validator.AddXsdFromResources(assemblyRCNet, "RCNet.Neural.Network.SM.PoolSettings.xsd");
    validator.AddXsdFromResources(assemblyRCNet, "RCNet.RCNetTypes.xsd");
    XElement poolSettingsElem = validator.Validate(elem, "rootElem");
    //Parsing
    //Name
    InstanceName = poolSettingsElem.Attribute("instanceName").Value;
    //Dimensions
    Dim = new PoolDimensions(int.Parse(poolSettingsElem.Attribute("dimX").Value, CultureInfo.InvariantCulture),
                             int.Parse(poolSettingsElem.Attribute("dimY").Value, CultureInfo.InvariantCulture),
                             int.Parse(poolSettingsElem.Attribute("dimZ").Value, CultureInfo.InvariantCulture)
                             );
    RouteToReadout = bool.Parse(poolSettingsElem.Attribute("routeToReadout").Value);
    //Input
    XElement inputElem = poolSettingsElem.Descendants("input").First();
    InputConnectionDensity = double.Parse(inputElem.Attribute("connectionDensity").Value, CultureInfo.InvariantCulture);
    InputSynapseWeight = new RandomValueSettings(inputElem.Descendants("weight").First());
    //Excitatory
    XElement excitatoryElem = poolSettingsElem.Descendants("excitatory").First();
    ExcitatoryActivation = new ActivationSettings(excitatoryElem.Descendants("activation").First());
    ExcitatoryBias = new RandomValueSettings(excitatoryElem.Descendants("bias").First());
    double excitatoryRelShare = double.Parse(excitatoryElem.Attribute("relShare").Value, CultureInfo.InvariantCulture);
    //Inhibitory
    XElement inhibitoryElem = poolSettingsElem.Descendants("inhibitory").First();
    InhibitoryActivation = new ActivationSettings(inhibitoryElem.Descendants("activation").First());
    InhibitoryBias = new RandomValueSettings(inhibitoryElem.Descendants("bias").First());
    double inhibitoryRelShare = double.Parse(inhibitoryElem.Attribute("relShare").Value, CultureInfo.InvariantCulture);
    InhibitoryNeuronsDensity = inhibitoryRelShare / (inhibitoryRelShare + excitatoryRelShare);
    //Interconnection
    XElement interconnectionElem = poolSettingsElem.Descendants("interconnection").First();
    InterconnectionDensity = double.Parse(interconnectionElem.Attribute("density").Value, CultureInfo.InvariantCulture);
    InterconnectionAvgDistance = interconnectionElem.Attribute("avgDistance").Value == "NA"
                                 ? 0d
                                 : double.Parse(interconnectionElem.Attribute("avgDistance").Value, CultureInfo.InvariantCulture);
    InterconnectionAllowSelfConn = bool.Parse(interconnectionElem.Attribute("allowSelfConnection").Value);
    InterconnectionSynapseWeight = new RandomValueSettings(interconnectionElem.Descendants("weight").First());
    //Retainment neurons
    XElement retainmentElem = poolSettingsElem.Descendants("retainmentNeurons").FirstOrDefault();
    RetainmentNeuronsFeature = (retainmentElem != null);
    if (RetainmentNeuronsFeature)
    {
        RetainmentNeuronsDensity = double.Parse(retainmentElem.Attribute("density").Value, CultureInfo.InvariantCulture);
        RetainmentMinRate = double.Parse(retainmentElem.Attribute("retainmentMinRate").Value, CultureInfo.InvariantCulture);
        RetainmentMaxRate = double.Parse(retainmentElem.Attribute("retainmentMaxRate").Value, CultureInfo.InvariantCulture);
        RetainmentNeuronsFeature = (RetainmentNeuronsDensity > 0 && RetainmentMaxRate > 0);
    }
    else
    {
        RetainmentNeuronsDensity = 0;
        RetainmentMinRate = 0;
        RetainmentMaxRate = 0;
    }
    return;
}
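A minimal usage sketch mirroring the feed forward example above; the file name "stateMachineCfg.xml" and the "pool" element name are assumptions, and the constructor validates whatever element it is given against the embedded XSDs.

// Hedged sketch with hypothetical file and element names.
XDocument smCfgDoc = XDocument.Load("stateMachineCfg.xml");
XElement poolElem = smCfgDoc.Root.Descendants("pool").First();
PoolSettings poolCfg = new PoolSettings(poolElem);
Console.WriteLine($"Pool '{poolCfg.InstanceName}', route to readout: {poolCfg.RouteToReadout}");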