Example #1
        /// <summary>
        /// Initialises the neural network.
        /// </summary>
        /// <param name="NumberOfInputs">the number of inputs to the network</param>
        /// <param name="LayerVectorDescription">the number of neurons in each layer</param>
        /// <param name="ActivationFunction">the activation function (default: sigmoid)</param>
        /// <param name="Learner">the learning algorithm (default: backpropagation)</param>
        public NeuralNetwork(int NumberOfInputs, int[] LayerVectorDescription, iActivationFunction ActivationFunction = null, LearningAlgorithm Learner = null)
        {
            if (LayerVectorDescription.Length < 1)
            {
                throw new Exception("NeuralNetwork -> cannot be built, since there are no neurons");
            }
            if (NumberOfInputs < 1)
            {
                throw new Exception("NeuralNetwork -> cannot be built, since it need at least 1 input");
            }

            ProtectedNumberOfInputs = NumberOfInputs;
            if (Learner != null)
            {
                TeacherAlgorithm = Learner;
            }
            else
            {
                TeacherAlgorithm = new BackPropagationLearningAlgorithm(this);
            }

            NetworkLayers    = new Layer[LayerVectorDescription.Length];
            NetworkLayers[0] = new Layer(LayerVectorDescription[0], ProtectedNumberOfInputs);
            for (int i = 1; i < LayerVectorDescription.Length; i++)
            {
                NetworkLayers[i] = new Layer(LayerVectorDescription[i], LayerVectorDescription[i - 1], ActivationFunction);
            }
        }
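
For orientation, a minimal usage sketch of the constructor above follows; the layer sizes (2 inputs, one hidden layer of 3 neurons, 1 output neuron) are hypothetical, and the null defaults stand in for the sigmoid activation and backpropagation learner described in the XML comments.

        // Hypothetical example: a network with 2 inputs, one hidden layer of 3 neurons
        // and a single output neuron. ActivationFunction and Learner are omitted, so the
        // defaults noted in the XML comments (sigmoid, backpropagation) apply.
        NeuralNetwork nn = new NeuralNetwork(2, new int[] { 3, 1 });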
Example #2
 private void radioButton1_CheckedChanged(object sender, EventArgs e)
 {
     learningAlgorithm    = LearningAlgorithm.C45Learning;
     radioButton3.Enabled = true;
     radioButton4.Enabled = true;
     radioButton5.Enabled = true;
 }
Example #3
        private void btnLoadVS_Click(object sender, EventArgs e)
        {
            OpenFileDialog OFD = new OpenFileDialog();

            OFD.Title       = "Select validation sets file";
            OFD.Multiselect = false;
            OFD.DefaultExt  = ".ioset";
            if (OFD.ShowDialog() == System.Windows.Forms.DialogResult.OK)
            {
                List <IOSet> VS = LearningAlgorithm.LoadIOSetFromFile(OFD.FileName);
                if (VS.Count > 0)
                {
                    if (VS[0].InputSet.Count != NN.InputLayer.NumberOfNeurons || VS[0].OutputSet.Count != NN.OutputLayer.NumberOfNeurons)
                    {
                        MessageBox.Show("The validation sets does not match neural network parameters");
                    }
                    else
                    {
                        try
                        {
                            txtVSFile.Text = OFD.FileName;
                            dgvValidationSets.Rows.Clear();
                            dgvValidationSets.Columns.Clear();
                            int c = 0;
                            for (int i = 1; i <= VS[0].InputSet.Count; i++)
                            {
                                dgvValidationSets.Columns.Add("col" + c++, "Input " + i);
                            }
                            for (int i = 1; i <= VS[0].OutputSet.Count; i++)
                            {
                                dgvValidationSets.Columns.Add("col" + c++, "Output " + i);
                            }
                            for (int i = 0; i < VS.Count; i++)
                            {
                                dgvValidationSets.Rows.Add();
                                c = 0;
                                for (int j = 0; j < VS[i].InputSet.Count; j++)
                                {
                                    dgvValidationSets[c++, i].Value = VS[i].InputSet[j];
                                }
                                for (int j = 0; j < VS[i].OutputSet.Count; j++)
                                {
                                    dgvValidationSets[c++, i].Value = VS[i].OutputSet[j];
                                }
                            }
                            BPN.ValidationSets = VS;
                        }
                        catch
                        {
                            MessageBox.Show("An error occured while loading the validation set file. May be the file is corrupted");
                            txtVSFile.Clear();
                            dgvValidationSets.Rows.Clear();
                            dgvValidationSets.Columns.Clear();
                        }
                    }
                }
            }
        }
Example #4
        public static string ToString(LearningAlgorithm la)
        {
            switch (la)
            {
            case LearningAlgorithm.C45Learning:
                return("C4.5 Learning");

            case LearningAlgorithm.ID3Learning:
                return("ID3 Learning");

            default:
                return("undefined");
            }
        }
Example #5
        public ProcessConfiguration GetConfiguration(PipelineContext context)
        {
            var pc = new ProcessConfiguration();

            pc["LearningAlgorithm"] = LearningAlgorithm.ToString();
            pc["Iterations"]        = Iterations.ToString();
            pc["Dimensions"]        = Dimensions;
            pc["LearningRate"]      = LearningRate.ToString();
            pc["TrainFile"]         = context.GetAsString("TrainFile");
            pc["TestFile"]          = context.GetAsString("TestFile");
            pc["AdditionalArgs"]    = _additionalArgs;

            return(pc);
        }
Example #6
 private void btnSaveVS_Click(object sender, EventArgs e)
 {
     try
     {
         List <IOSet> vsets = new List <IOSet>();
         for (int i = 0; i < dgvValidationSets.Rows.Count; i++)
         {
             try
             {
                 int c    = 0;
                 Set iset = new Set();
                 for (int j = 0; j < NN.InputLayer.NumberOfNeurons; j++)
                 {
                     iset.Add(double.Parse(dgvValidationSets[c++, i].Value.ToString()));
                 }
                 Set oset = new Set();
                 for (int j = 0; j < NN.OutputLayer.NumberOfNeurons; j++)
                 {
                     oset.Add(double.Parse(dgvValidationSets[c++, i].Value.ToString()));
                 }
                 vsets.Add(new IOSet(iset, oset));
             }
             catch
             {
                 // skip rows with missing or unparsable values
             }
         }
         if (vsets.Count > 0)
         {
             SaveFileDialog sfd = new SaveFileDialog();
             sfd.Title        = "Save validation set";
             sfd.DefaultExt   = ".ioset";
             sfd.AddExtension = true;
             if (sfd.ShowDialog() == System.Windows.Forms.DialogResult.OK)
             {
                 string filename = sfd.FileName;
                 if (LearningAlgorithm.SaveIOSetToFile(filename, vsets))
                 {
                     MessageBox.Show("Validation sets successfully saved", "Saved", MessageBoxButtons.OK, MessageBoxIcon.Information);
                 }
             }
         }
     }
     catch
     {
         MessageBox.Show("An error occured, validation sets cannot be saved.", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
     }
 }
Example #7
 // Factory method: returns a new classification model for the given learning algorithm
 public static ClassificationModel CreateModel(LearningAlgorithm learningAlgorithm)
 {
     switch (learningAlgorithm)
     { 
         case LearningAlgorithm.C45:
             return new C45Model();
             
         case LearningAlgorithm.NaiveBayesS:
             return new NaiveBayesSModel();
             
         case LearningAlgorithm.NaiveBayes:
             return new NaiveBayesModel();        
         case LearningAlgorithm.SVM:
             return new SVMModel();
          default:
              throw new ArgumentException("Model not yet implemented");
     }
 }
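
A minimal call-site sketch for this factory follows; it assumes only the LearningAlgorithm enum values and model classes visible in the switch above, and the enclosing class of CreateModel is not shown in the snippet.

 // Hypothetical usage (the containing class is not shown in the snippet above):
 ClassificationModel model = CreateModel(LearningAlgorithm.C45);   // yields a C45Model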
Example #8
 public Form1()
 {
     InitializeComponent();
     learningAlgorithm    = LearningAlgorithm.C45Learning;
     decisionVariableType = AttributeType.Discrete;
     this.MaximumSize     = new Size(809, 497);
     this.MaximizeBox     = false;
     ofd = new OpenFileDialog();
     this.CenterToScreen();
     radioButton2.Hide();
     bindAlgorithmType();
     drawGraph();
     nodeTitles = WeightedDirectedGraph.VertexTitleList;
     bindStartNodes();
     bindGoalNodes();
     depthInput.Minimum     = 1;
     depthInput.Maximum     = graphSample.Count;
     adjMatrixGraphNodeList = AdjacencyMatrixGraph.NodeList;
 }
Example #9
        public async Task CreateForecastingTaskMLModel(string entityName, LearningAlgorithm algorithm, bool isValidationNeeded = true)
        {
            entityName = entityName?.Trim();
            if (isValidationNeeded)
            {
                if (!await DoesForecastingTaskEntityExist(entityName))
                {
                    throw new DomainErrorException($"Forecasting task with name {entityName} doesn't exist!");
                }
            }

            try
            {
                var taskEntity = await _forecastingTasksRepository.GetForecastingTaskEntity(entityName);

                if (taskEntity.Records.Count == 0)
                {
                    throw new DomainErrorException("There are no data in the database!");
                }
                var nonInformationFields = taskEntity.FieldsDeclaration.Where(x => x.Type != FieldType.InformationField).ToList();
                var entity   = new ClassBuilder(entityName, GetFieldsType(nonInformationFields));
                var dataList = (IList)Activator.CreateInstance(typeof(List <>).MakeGenericType(entity.Type));
                foreach (var fieldsValue in taskEntity.Records)
                {
                    var myClassInstance = entity.CreateObject();
                    foreach (var fieldDeclaration in nonInformationFields)
                    {
                        var value = fieldsValue.FieldsValue.Single(x => x.FieldId == fieldDeclaration.Id).Value;
                        entity.SetPropertyValue(myClassInstance, fieldDeclaration.Name, float.Parse(value));
                    }
                    dataList.Add(myClassInstance);
                }

                var factors        = nonInformationFields.Where(x => x.Type == FieldType.Factor).Select(x => x.Name);
                var predictedValue = nonInformationFields.Single(x => x.Type == FieldType.PredictionField).Name;
                ForecastingTaskModelBuilder.CreateModel(dataList, entityName, factors, predictedValue, algorithm);
            }
            catch (Exception)
            {
                throw;
            }
        }
Example #10
 private ThotSmtParameters(ThotSmtParameters other)
 {
     _tmFileNamePrefix     = other._tmFileNamePrefix;
     _lmFileNamePrefix     = other._lmFileNamePrefix;
     _modelNonMonotonicity = other._modelNonMonotonicity;
     _modelW              = other._modelW;
     _modelA              = other._modelA;
     _modelE              = other._modelE;
     _modelHeuristic      = other._modelHeuristic;
     _modelWeights        = other._modelWeights;
     _learningAlgorithm   = other._learningAlgorithm;
     _learningRatePolicy  = other._learningRatePolicy;
     _learningStepSize    = other._learningStepSize;
     _learningEMIters     = other._learningEMIters;
     _learningE           = other._learningE;
     _learningR           = other._learningR;
     _decoderS            = other._decoderS;
     _decoderBreadthFirst = other._decoderBreadthFirst;
     _decoderG            = other._decoderG;
 }
Example #11
        // Factory method: returns a new classification model for the given learning algorithm
        public static ClassificationModel CreateModel(LearningAlgorithm learningAlgorithm)
        {
            switch (learningAlgorithm)
            {
            case LearningAlgorithm.C45:
                return(new C45Model());

            case LearningAlgorithm.NaiveBayesS:
                return(new NaiveBayesSModel());

            case LearningAlgorithm.NaiveBayes:
                return(new NaiveBayesModel());

            case LearningAlgorithm.SVM:
                return(new SVMModel());

            default:
                throw new ArgumentException("Model not yet implemented");
            }
        }
Example #12
        public RBF(int K, int numOfEpocks, double learningRate, double maxError, List <List <double> > trainingSamples, List <List <double> > trainingLabels, List <List <double> > testSamples, List <List <double> > testLabels)
        {
            this.numOfEpocks     = numOfEpocks;
            this.trainingSamples = trainingSamples;
            this.trainingLabels  = trainingLabels;
            this.testSamples     = testSamples;
            this.testLabels      = testLabels;
            this.maxError        = maxError;
            this.learningRate    = learningRate;
            Centroids            = new List <List <double> >(); //[NumOfCluster][NumOfFeatures (means)]
            Random rnd     = new Random(Guid.NewGuid().GetHashCode());
            int    counter = K;                                 // Num of Clusters

            while (counter-- > 0)
            {
                int index = rnd.Next(trainingSamples.Count);
                Centroids.Add(trainingSamples[index]);
            }

            KMean    = new K_Means(ref Centroids, trainingSamples);
            Variance = KMean.GetVariance;
            List <int> numOfNeuronsPerLayer = new List <int>();

            backpropagation = new Backpropagation();

            numOfNeuronsPerLayer.Add(K);
            numOfNeuronsPerLayer.Add(3);

            this.neuralNetwork = new FeedforwardNeuralNetwrok(2);
            this.neuralNetwork.setNetwork(numOfNeuronsPerLayer);

            this.neuralNetwork.LMSsetLayer(1, new IdentityFunction());
        }
Example #13
 private string BuildArguments(string trainFile, string testFile, string testOutput)
 {
     return(String.Format("-task r -train {0} -test {1} -method {2} -iter {3} -dim {4} -learn_rate {5} -out {6} -regular {7}",
                          trainFile, testFile, LearningAlgorithm.ToString().ToLower(), Iterations, Dimensions, LearningRate, testOutput, Regularization));
 }
Example #14
        private static IEstimator <ITransformer> BuildTrainingPipeline(MLContext mlContext, IEnumerable <string> factorNames, string predicatedValueName, LearningAlgorithm algorithm)
        {
            var dataProcessPipeline = mlContext.Transforms.Concatenate("Features", factorNames.ToArray());
            IEstimator <ITransformer> trainer;

            switch (algorithm)
            {
            case LearningAlgorithm.FastForest:
                trainer = mlContext.Regression.Trainers.FastForest(labelColumnName: predicatedValueName, featureColumnName: "Features");
                break;

            case LearningAlgorithm.FastTree:
                trainer = mlContext.Regression.Trainers.FastTree(labelColumnName: predicatedValueName, featureColumnName: "Features");
                break;

            case LearningAlgorithm.FastTreeTweedie:
                trainer = mlContext.Regression.Trainers.FastTreeTweedie(labelColumnName: predicatedValueName, featureColumnName: "Features");
                break;

            case LearningAlgorithm.Gam:
                trainer = mlContext.Regression.Trainers.Gam(labelColumnName: predicatedValueName, featureColumnName: "Features");
                break;

            case LearningAlgorithm.LbfgsPoissonRegression:
                trainer = mlContext.Regression.Trainers.LbfgsPoissonRegression(labelColumnName: predicatedValueName, featureColumnName: "Features");
                break;

            case LearningAlgorithm.LightGbm:
                trainer = mlContext.Regression.Trainers.LightGbm(labelColumnName: predicatedValueName, featureColumnName: "Features");
                break;

            //case LearningAlgorithm.OnlineGradientDescent:
            //    trainer = mlContext.Regression.Trainers.OnlineGradientDescent(labelColumnName: predicatedValueName, featureColumnName: "Features");
            //    break;
            case LearningAlgorithm.Sdca:
                trainer = mlContext.Regression.Trainers.Sdca(labelColumnName: predicatedValueName, featureColumnName: "Features");
                break;

            default:
                throw new Exception($"Algorithm {algorithm} was not implemented!");
            }
            return(dataProcessPipeline.Append(trainer));
        }
Example #15
        public static void CreateModel(dynamic data, string entityName, IEnumerable <string> factorNames, string predicatedValueName, LearningAlgorithm algorithm)
        {
            IDataView trainingDataView = mlContext.Data.LoadFromEnumerable(data);

            IEstimator <ITransformer> trainingPipeline = BuildTrainingPipeline(mlContext, factorNames, predicatedValueName, algorithm);

            ITransformer dataPrepTransformer = trainingPipeline
                                               .Fit(trainingDataView);

            IDataView transformedData = dataPrepTransformer.Transform(trainingDataView);

            var trainedModel = trainingPipeline.Fit(transformedData);

            SaveModel(mlContext, trainedModel, transformedData.Schema, dataPrepTransformer, trainingDataView.Schema, entityName);
        }
Example #16
 public Task CreateTaskEntityPredictionModel(string taskEntityName, LearningAlgorithm learningAlgorithm)
 {
     return(_machineLearningService.CreateForecastingTaskMLModel(taskEntityName, learningAlgorithm));
 }
Example #17
 private void radioButton2_CheckedChanged(object sender, EventArgs e)
 {
     learningAlgorithm    = LearningAlgorithm.ID3Learning;
     radioButton3.Enabled = false;
     radioButton5.Enabled = false;
 }