/// <summary>
/// Prepares the factory for a training run using an already-constructed network.
/// Stores the experiment/network, resets the iteration counter and reports the
/// Started state to progress listeners.
/// </summary>
/// <param name="expData">Experiment holding the training data.</param>
/// <param name="ann">Pre-built neural network whose parameters drive the training.</param>
/// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
public void PrepareAlgorithm(Experiment expData, NeuralNetwork ann)
{
    //throw the specific argument exception instead of the generic Exception;
    //still backward compatible with callers catching Exception
    if (ann == null)
        throw new ArgumentNullException("ann", "Argument value cannot be null");
    if (expData == null)
        throw new ArgumentNullException("expData", "Argument value cannot be null");

    //preparing the variables
    m_Network = ann;
    m_Experiment = expData;
    m_Parameters = m_Network.Parameters;
    m_IterationCounter = 0;
    IsAlgorthmPrepared = true;
    StopIteration = false;

    //report the initial state; MaxValue marks "no error computed yet"
    var rp = new ProgressIndicatorEventArgs()
    {
        ReportType = ProgramState.Started,
        LearningError = float.MaxValue,
        CurrentIteration = 0,
        LearnOutput = null,
    };
    ReportProgress(rp);
}
/// <summary>
/// Before we start backpropagation training, prepare all necessary information:
/// create and initialize the network on first use, store the experiment and
/// parameters, and report the Started state.
/// </summary>
/// <param name="expData">Experiment holding the normalized training data.</param>
/// <param name="annParams">ANN parameters; must not be null for this implementation.</param>
/// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
public override void PrepareAlgorithm(Experiment expData, ANNParameters annParams = null)
{
    //throw the specific argument exception instead of the generic Exception;
    //still backward compatible with callers catching Exception
    if (annParams == null)
        throw new ArgumentNullException("annParams", "Argument value cannot be null");
    if (expData == null)
        throw new ArgumentNullException("expData", "Argument value cannot be null");

    //reset iteration and create the network only on first preparation;
    //subsequent calls reuse the existing network (use ResetFactory to start over)
    if (m_Network == null)
    {
        m_IterationCounter = 0;
        //input/output counts are taken after normalization
        //(category columns expand via the 1-of-N rule)
        m_Network = new BPNeuralNetwork(annParams,
            expData.GetColumnInputCount_FromNormalizedValue(),
            expData.GetColumnOutputCount_FromNormalizedValue());
        m_Network.InitializeNetwork();
    }

    m_Experiment = expData;
    m_Parameters = annParams;
    IsAlgorthmPrepared = true;
    StopIteration = false;
    m_expRowCount = m_Experiment.GetRowCount();

    //Send report for iteration; -1 marks "no error computed yet"
    var rp = new ProgressIndicatorEventArgs()
    {
        ReportType = ProgramState.Started,
        LearningError = -1,
        CurrentIteration = 0,
        LearnOutput = null,
    };
    ReportProgress(rp);
}
/// <summary>
/// Set GUI controls based on the ANNParameters.
/// Combo-box indexes must stay in sync with GetParameters().
/// </summary>
/// <param name="parameters">Parameter values to show in the panel.</param>
/// <returns>Always true.</returns>
public bool SetParameters(ANNParameters parameters)
{
    //Activation function (0=Linear, 1=Binary, 2=Sigmoid, 3=BipolarSign, 4=TanH)
    if (parameters.m_ActFunction is Linear)
    {
        cmbActivationFuncs.SelectedIndex = 0;
    }
    else if (parameters.m_ActFunction is Binary)
    {
        cmbActivationFuncs.SelectedIndex = 1;
    }
    else if (parameters.m_ActFunction is Sigmoid)
    {
        cmbActivationFuncs.SelectedIndex = 2;
    }
    else if (parameters.m_ActFunction is BipolarSign)
    {
        cmbActivationFuncs.SelectedIndex = 3;
    }
    else if (parameters.m_ActFunction is TanH)
    {
        cmbActivationFuncs.SelectedIndex = 4;
    }

    //learning algorithm
    if (parameters.m_LearningAlgo == LearningAlgoritm.BP)
    {
        cmbLearningAlgorithm.SelectedIndex = 0;
    }
    else if (parameters.m_LearningAlgo == LearningAlgoritm.PSO)
    {
        cmbLearningAlgorithm.SelectedIndex = 1;
    }
    else
    {
        cmbLearningAlgorithm.SelectedIndex = 2;
    }

    txtActFunParam1.Text = parameters.m_ActFuncParam1.ToString();
    txtMomentum.Text = parameters.m_Momentum.ToString();
    textLearningRate.Text = parameters.m_LearningRate.ToString();
    textNumHiddenLayers.Text = parameters.m_NumHiddenLayers.ToString();
    textNeuronsOfEachLAyer.Text = parameters.m_NeuronsInHiddenLayer.ToString();

    //PSO parameters
    textParticles.Text = parameters.m_PSOParameters.m_ParticlesNumber.ToString();
    textIWeight.Text = parameters.m_PSOParameters.m_IWeight.ToString();
    //BUGFIX: the cognitive weight is the local (personal-best) factor and the
    //social weight is the global-best factor — the same mapping GetParameters()
    //uses when reading these boxes back. Previously the two values were written
    //into each other's text boxes, so a Set/Get round trip swapped them.
    textCWeight.Text = parameters.m_PSOParameters.m_LWeight.ToString();
    textSWeight.Text = parameters.m_PSOParameters.m_GWeight.ToString();

    return (true);
}
/// <summary>
/// Constructs the ANN settings panel: builds the designer components,
/// fills the combo boxes and shows the default parameter values.
/// </summary>
public ANNSettingsPanel()
{
    InitializeComponent();

    //populate the selection lists before writing values into them
    LoadActivationFunsInCombo();
    LoadSelectionMethodsInCOmbo();

    //show defaults in the UI
    SetParameters(new ANNParameters());
}
/// <summary>
/// Creates a neural network specialized for binary classification;
/// all construction work is delegated to the NeuralNetwork base class.
/// </summary>
/// <param name="param">ANN parameters used to configure the network.</param>
/// <param name="normalizedInputCount">number of input variables after normalization. In case of category column type we need to apply the 1-of-N rule.</param>
/// <param name="normalizedOutputCount">number of output variables after normalization. In case of category column type we need to apply the 1-of-N rule.</param>
public BCNeuralNetwork(ANNParameters param, int normalizedInputCount, int normalizedOutputCount)
    : base(param, normalizedInputCount, normalizedOutputCount)
{
}
/// <summary>
/// Base constructor: captures the configuration and the input/output layer sizes.
/// </summary>
/// <param name="param">ANN parameters used to configure the network.</param>
/// <param name="inputCount">Number of input variables.</param>
/// <param name="outputCount">Number of output variables.</param>
public NeuralNetwork(ANNParameters param, int inputCount, int outputCount)
{
    //store the layer sizes first, then the parameter bag
    m_InputCount = inputCount;
    m_OutputCount = outputCount;
    m_Parameters = param;
}
/// <summary>
/// Prepares the concrete learning algorithm before training starts.
/// Implementations store the experiment/parameters and create or initialize
/// their network as needed.
/// </summary>
/// <param name="expData">Experiment holding the training data.</param>
/// <param name="annParams">Algorithm parameters; optional here, but concrete factories may require a non-null value.</param>
public abstract void PrepareAlgorithm(Experiment expData, ANNParameters annParams = null);
/// <summary>
/// Resets the factory to its initial state so a fresh training
/// session can be prepared from scratch.
/// </summary>
public void ResetFactory()
{
    //drop the parameters and the network, and rewind the iteration counter
    m_Parameters = null;
    m_IterationCounter = 0;
    m_Network = null;
}
/// <summary>
/// Stores the supplied ANN parameters on the factory.
/// </summary>
/// <param name="annp">Parameter set to use for subsequent training.</param>
public void SetANNParameters(ANNParameters annp) => m_Parameters = annp;
/// <summary>
/// Builds an ANNParameters instance from the current values of the GUI controls.
/// Shows a message box and returns null when any text box holds an invalid value.
/// </summary>
/// <returns>The parsed parameters, or null on validation failure.</returns>
public ANNParameters GetParameters()
{
    ANNParameters parameters = new ANNParameters();

    if (!double.TryParse(txtMomentum.Text, out parameters.m_Momentum))
    {
        MessageBox.Show("Invalid value for Momentum!");
        return (null);
    }
    if (!double.TryParse(textLearningRate.Text, out parameters.m_LearningRate))
    {
        MessageBox.Show("Invalid value for Learning Rate!");
        return (null);
    }
    if (!int.TryParse(textNumHiddenLayers.Text, out parameters.m_NumHiddenLayers))
    {
        MessageBox.Show("Invalid value for number of Layers!");
        return (null);
    }
    if (!int.TryParse(textNeuronsOfEachLAyer.Text, out parameters.m_NeuronsInHiddenLayer))
    {
        MessageBox.Show("Invalid value for number of Neurons in hidden Layer!");
        return (null);
    }

    //parameters for activation function
    if (!double.TryParse(txtActFunParam1.Text, out parameters.m_ActFuncParam1))
    {
        MessageBox.Show("Invalid value for Parameter 1!");
        return (null);
    }

    //activation function (indexes must stay in sync with SetParameters)
    if (cmbActivationFuncs.SelectedIndex == 0)
    {
        parameters.m_ActFunction = new Linear();
    }
    else if (cmbActivationFuncs.SelectedIndex == 1)
    {
        parameters.m_ActFunction = new Binary();
    }
    else if (cmbActivationFuncs.SelectedIndex == 2)
    {
        parameters.m_ActFunction = new Sigmoid(parameters.m_ActFuncParam1);
    }
    else if (cmbActivationFuncs.SelectedIndex == 3)
    {
        parameters.m_ActFunction = new BipolarSign(parameters.m_ActFuncParam1);
    }
    else
    {
        parameters.m_ActFunction = new TanH();
    }

    //learning algorithm; any unknown index falls back to backpropagation
    if (cmbLearningAlgorithm.SelectedIndex == 0)
    {
        parameters.m_LearningAlgo = LearningAlgoritm.BP;
    }
    else if (cmbLearningAlgorithm.SelectedIndex == 1)
    {
        parameters.m_LearningAlgo = LearningAlgoritm.PSO;
    }
    else
    {
        parameters.m_LearningAlgo = LearningAlgoritm.BP;
    }

    //PSO parameters
    if (!int.TryParse(textParticles.Text, out parameters.m_PSOParameters.m_ParticlesNumber))
    {
        MessageBox.Show("Invalid value for Number of Particles!");
        return (null);
    }
    if (!double.TryParse(textIWeight.Text, out parameters.m_PSOParameters.m_IWeight))
    {
        MessageBox.Show("Invalid value for Inertia Weight!");
        return (null);
    }
    //cognitive weight = local (personal-best) factor
    if (!double.TryParse(textCWeight.Text, out parameters.m_PSOParameters.m_LWeight))
    {
        MessageBox.Show("Invalid value for Cognitive Weight!");
        return (null);
    }
    //BUGFIX: the social (global-best) weight must be read from its own text box;
    //previously textParticles.Text was parsed here, so the social weight silently
    //received the particle count instead of the value in textSWeight.
    if (!double.TryParse(textSWeight.Text, out parameters.m_PSOParameters.m_GWeight))
    {
        MessageBox.Show("Invalid value for Social Weight!");
        return (null);
    }

    return (parameters);
}
/// <summary>
/// Creates a neural network trained with backpropagation;
/// all construction work is delegated to the NeuralNetwork base class.
/// </summary>
/// <param name="param">ANN parameters used to configure the network.</param>
/// <param name="inputCount">Number of input variables.</param>
/// <param name="outputCount">Number of output variables.</param>
public BPNeuralNetwork(ANNParameters param, int inputCount, int outputCount)
    : base(param, inputCount, outputCount)
{
}
/// <summary>
/// Before we start the PSO solver, prepare all necessary information:
/// create a classification network on first use, reconcile the PSO parameter
/// set, initialize or re-run the swarm and report the starting fitness.
/// </summary>
/// <param name="expData">Experiment holding the normalized training data.</param>
/// <param name="annParams">ANN parameters carrying the PSO settings; must not be null.</param>
public override void PrepareAlgorithm(Experiment expData, ANNParameters annParams = null)
{
    if (annParams == null || expData == null)
    {
        throw new Exception("Argument value cannot be null");
    }
    //reset iteration and network only on first preparation
    if (m_Network == null)
    {
        m_IterationCounter = 0;
        //depending on the type of the output column create the adequate neural network
        var colType = expData.GetOutputColumnType();
        if (colType == ColumnDataType.Binary) //binary classification
        {
            m_Network = new BCNeuralNetwork(annParams,
                expData.GetColumnInputCount_FromNormalizedValue(),
                expData.GetColumnOutputCount_FromNormalizedValue());
        }
        else //multiclass classification
        {
            m_Network = new MCNeuralNetwork(annParams,
                expData.GetColumnInputCount_FromNormalizedValue(),
                expData.GetColumnOutputCount_FromNormalizedValue());
        }
        m_Network.InitializeNetwork();
    }
    //
    m_Experiment = expData;

    //keep the previously configured swarm parameters when the factory was already
    //prepared; otherwise take them from the incoming parameters and force a fresh
    //swarm by clearing m_psoAlgorithm
    PSOParameters swarm = null;
    if (m_Parameters != null)
    {
        swarm = m_Parameters.m_PSOParameters;
    }
    else
    {
        m_psoAlgorithm = null;
        swarm = annParams.m_PSOParameters;
    }
    //
    m_Parameters = annParams;
    //NOTE(review): the incoming annParams' own PSO settings are overwritten here
    //with the previously selected swarm parameters — confirm this is intended
    m_Parameters.m_PSOParameters = swarm;
    m_expRowCount = m_Experiment.GetRowCount();
    IsAlgorthmPrepared = true;
    StopIteration = false;

    float newfitness = 0;
    if (m_psoAlgorithm == null)
    {
        //initialize the swarm: one dimension per network weight/bias,
        //cross-entropy as the fitness function
        m_Parameters.m_PSOParameters.m_Dimension = m_Network.GetWeightsAndBiasCout();
        m_psoAlgorithm = new ParticleSwarm(m_Parameters.m_PSOParameters, CrossEntropy);
        //init
        newfitness = m_psoAlgorithm.InitSwarm();
    }
    //
    else
        //swarm already exists — advance it one run instead of re-initializing
        newfitness = m_psoAlgorithm.RunSwarm();

    var model = CalculateModel(false);

    //Send report for iteration
    var rp = new ProgressIndicatorEventArgs()
    {
        ReportType = ProgramState.Started,
        LearningError = newfitness,
        CurrentIteration = 0,
        LearnOutput = model,
    };
    ReportProgress(rp);
}