const uint iterations_between_reports = 1000;    // not needed

        //static NeuralNetwork()
        //{
        //    net = new NeuralNet(NetworkType.LAYER, num_layers, num_input, num_hidden, num_output); // network type: layered, number of layers, inputs, hidden layer 1 (80), hidden layer 2 (60), outputs
        //}

        public void Train(string path)
        {
            //net = new NeuralNet(NetworkType.LAYER, num_layers, num_input, num_hidden, num_output); // network type: layered, number of layers, inputs, hidden layer 1 (80), hidden layer 2 (60), outputs

            if (path.Split('.').Last() == "net")
            {
                net = new NeuralNet(path);
            }
            else
            {
                net = new NeuralNet(NetworkType.LAYER, num_layers, num_input, num_hidden, num_output);
                net.LearningRate = learning_rate;
                net.ActivationSteepnessHidden = 0.8F;
                net.ActivationSteepnessOutput = 1.0F;
                net.ActivationFunctionHidden  = ActivationFunction.SIGMOID_SYMMETRIC; // symmetric sigmoid activation in the hidden layers
                net.ActivationFunctionOutput  = ActivationFunction.SIGMOID;           // sigmoid activation in the output layer
                net.TrainingAlgorithm         = TrainingAlgorithm.TRAIN_RPROP;        // training algorithm: resilient backpropagation (RPROP)
                using (TrainingData data = new TrainingData())
                {
                    if (data.ReadTrainFromFile(path))                                                     // if the file exists, read it
                    {
                        net.InitWeights(data);                                                            // initialize the weights with random values
                        net.TrainOnData(data, max_iterations, iterations_between_reports, desired_error); // train the network
                    }
                    }
                }
            }
        }
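A brief usage sketch for the Train method above (not part of the original example): the surrounding class is assumed to be the NeuralNetwork type hinted at by the commented-out constructor, and both file paths are invented for illustration. A ".net" extension loads a previously saved network; anything else is treated as a FANN training file and a new LAYER network is built and trained.

        var recognizer = new NeuralNetwork();               // assumed class name
        recognizer.Train("..\\datasets\\letters.train");    // hypothetical path: builds and trains a new network
        recognizer.Train("..\\datasets\\letters.net");      // hypothetical path: loads the previously saved network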
Example #2
        static void Main(string[] argv)
        {
            const uint max_epochs = 1000;
            uint num_threads = 1;
            TrainingData data;
            NeuralNet net;
            long before;
            float error;

            if (argv.Length == 2)
                num_threads = UInt32.Parse(argv[1]);
            using (data = new TrainingData("..\\..\\..\\datasets\\mushroom.train"))
            using (net = new NeuralNet(NetworkType.LAYER, 3, data.InputCount, 32, data.OutputCount))
            {
                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.SIGMOID;

                before = Environment.TickCount;
                for (int i = 1; i <= max_epochs; i++)
                {
                    error = num_threads > 1 ? net.TrainEpochIrpropmParallel(data, num_threads) : net.TrainEpoch(data);
                    Console.WriteLine("Epochs     {0}. Current error: {1}", i.ToString("00000000"), error.ToString("0.0000000000"));
                }

                Console.WriteLine("ticks {0}", Environment.TickCount - before);
                Console.ReadKey();
            }
        }
        private void OpenFile(object sender, RoutedEventArgs e)
        {
            OpenFileDialog dlg = new OpenFileDialog();

            dlg.DefaultExt = "*.pgm";
            //dlg.Filter = "PGM Files (*.pgm)|*.pgm|JPEG Files (*.jpeg)|*.jpeg|PNG Files (*.png)|*.png|JPG Files (*.jpg)|*.jpg|GIF Files (*.gif)|*.gif";
            dlg.Filter      = "Image Files (*.pgm, *.jpeg, *.png, *.jpg, *.gif)|*.pgm; *.jpeg; *.png; *.jpg; *.gif";
            dlg.Multiselect = true;
            bool? result = dlg.ShowDialog();

            if (result == true)
            {
                paths = new TrainingData[dlg.FileNames.Length];
                for (int i = 0; i < paths.Length; i++)
                {
                    paths[i] = new TrainingData {
                        filePath = dlg.FileNames[i], label = 0
                    };
                }
                pathIndex         = 0;
                LabelCurrent.Text = string.Format("{0}/{1}", pathIndex + 1, paths.Length);
                SetPathToWindow(paths[pathIndex]);
                isEditingXML      = false;
                BDelete.IsEnabled = false;
                BUpdate.IsEnabled = false;
            }
        }
Example #4
        private double GradientB2(TrainingData sample, int index, double o, double[] w, double[] z)
        {
            var miB      = SigmoidB(sample.Y, index);
            var gradient = -1 * (sample.Y - A2[index]) * miB * (1 - miB) * SigmoidB(sample.Y, index);

            return(GradientO(o, sample) * GradientW(index, w, z) * gradient);
        }
Example #5
        private double GradientB1(TrainingData sample, int index, double o, double[] w, double[] z)
        {
            var miA      = SigmoidA(sample.X, index);
            var gradient = -1 * (sample.X - A1[index]) * miA * (1 - miA) * SigmoidB(sample.Y, index);

            return(GradientO(o, sample) * GradientW(index, w, z) * gradient);
        }
Example #6
        static void Main()
        {
            const uint  num_input              = 3;
            const uint  num_output             = 1;
            const uint  num_layers             = 4;
            const uint  num_neurons_hidden     = 5;
            const float desired_error          = 0.0001F;
            const uint  max_epochs             = 5000;
            const uint  epochs_between_reports = 1000;

            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, num_input, num_neurons_hidden, num_neurons_hidden, num_output))
            {
                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.LINEAR;
                net.TrainingAlgorithm        = TrainingAlgorithm.TRAIN_RPROP;
                using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\scaling.data"))
                {
                    net.SetScalingParams(data, -1, 1, -1, 1);
                    net.ScaleTrain(data);

                    net.TrainOnData(data, max_epochs, epochs_between_reports, desired_error);
                    net.Save("..\\..\\..\\datasets\\scaling.net");

                    Console.ReadKey();
                }
            }
        }
Example #7
 public OCRHandler(TrainingDataOptions openOptions)
 {
     TrainingData TDLibrary = new TrainingData(openOptions);
     trainingLibrary = TDLibrary.trainingLibrary;
     listOfIndicies = TDLibrary.listOfIndicies;
     listOfIndexLabels = TDLibrary.listOfIndexLabels;
 }
        public async Task <IActionResult> Post([FromBody] TrainingData data)
        {
            var days   = 0;
            var result = string.Empty;

            //Validate data.
            result = data.ValidateDates(out days);

            try
            {
                //If valid data then save.
                if (days > 0)
                {
                    var trainingDA   = new TrainingDA();
                    var trainingData = new Training {
                        TrainingName = data.TrainingName, StartDate = data.StartDate, EndDate = data.EndDate
                    };
                    var saveFlag = await trainingDA.SaveTraining(trainingData);

                    result = Constants.ResponseMessages.Training_Save_Success;
                    return(Ok(new { result = result, days = days }));
                }
                return(BadRequest(new { error = result, days = days }));
            }
            catch (Exception ex)
            {
                //TODO: log the exception (e.g. to Elasticsearch or via log4net).
                return(new StatusCodeResult(StatusCodes.Status500InternalServerError));
            }
        }
Example #9
        static void Main(string[] args)
        {
            var result = Parser.Default.ParseArguments <Options>(args);

            var parsed = result as Parsed <Options>;

            if (parsed != null)
            {
                SetupLogging();

                var options = parsed.Value;

                var logger = LogManager.GetLogger("Trainer");
                logger.Info("Generating training data ...");

                if (!options.SkipGeneration)
                {
                    var data = new TrainingData();

                    data.Generate(options.TrainingSet);
                    data.Generate(options.TestSet);
                }

                var trainer = new NetworkTrainer();
                trainer.Run(options.Model, options.TrainingSet, options.TestSet);
            }
            else
            {
                Console.WriteLine(HelpText.AutoBuild(result));
            }
        }
        public ContextSensitiveSpellingCorrection(IPOSTagger posTagger, IEnumerable <string> corpora, IEnumerable <string[]> confusionSets, bool prune)
        {
            _posTagger = posTagger;
            _contextFeaturesExtractor      = new ContextFeaturesExtractor(k);
            _collocationtFeaturesExtractor = new CollocationFeaturesExtractor(l);
            _statsHelper = new StatsHelper();
            _comparators = new List <Comparator>(confusionSets.Count());

            Sentence[] sentences = PreProcessCorpora(corpora).ToArray();


            /* the processed corpus is serialized so later trials can reload it quickly */
            XmlSerializer x = new XmlSerializer(typeof(Sentence[]));

            using (FileStream fs = new FileStream(@"Sentence.xml", FileMode.Create))
            {
                x.Serialize(fs, sentences);
            }
            using (FileStream fs = new FileStream(@"Sentence.xml", FileMode.Open))
            {
                sentences = (Sentence[])x.Deserialize(fs);
            }
            Console.WriteLine("Deserialize complete");

            var featureFrequencies = new Dictionary <string, Dictionary <string, int> >(StringComparer.OrdinalIgnoreCase);

            if (prune)
            {
                /* preprocess terms' frequencies */
                featureFrequencies = _statsHelper.GetFrequencies(sentences);
            }

            Parallel.ForEach(confusionSets, confusionSet =>
            {
                TrainingData output = GenerateTrainingData(sentences, prune, featureFrequencies, confusionSet);

                Train(confusionSet, output.Features.ToArray(), output.Samples);
            });
        }
Example #11
        public IHttpActionResult Post([FromBody] TrainingData training)
        {
            try
            {
                if (training == null)
                {
                    return(BadRequest("Request had paramters"));
                }

                DateTime startDate = (DateTime)training.StartDate;
                DateTime endDate   = (DateTime)training.EndDate;

                int days = (endDate - startDate).Days;
                using (TrainingEntities entities = new TrainingEntities())
                {
                    entities.TrainingDatas.Add(training);
                    entities.SaveChanges();
                }
                return(Ok(days));
            }
            catch (Exception ex)
            {
                return(InternalServerError(ex));
            }
        }
Example #12
        static void Main(string[] argv)
        {
            const uint   max_epochs  = 1000;
            uint         num_threads = 1;
            TrainingData data;
            NeuralNet    net;
            long         before;
            float        error;

            if (argv.Length == 2)
            {
                num_threads = UInt32.Parse(argv[1]);
            }
            using (data = new TrainingData("..\\..\\..\\datasets\\mushroom.train"))
                using (net = new NeuralNet(NetworkType.LAYER, 3, data.InputCount, 32, data.OutputCount))
                {
                    net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                    net.ActivationFunctionOutput = ActivationFunction.SIGMOID;

                    before = Environment.TickCount;
                    for (int i = 1; i <= max_epochs; i++)
                    {
                        error = num_threads > 1 ? net.TrainEpochIrpropmParallel(data, num_threads) : net.TrainEpoch(data);
                        Console.WriteLine("Epochs     {0}. Current error: {1}", i.ToString("00000000"), error.ToString("0.0000000000"));
                    }

                    Console.WriteLine("ticks {0}", Environment.TickCount - before);
                    Console.ReadKey();
                }
        }
Example #13
    public Data(byte[] data, byte label)
    {
        total    = ConstantValues.TOTAL_IMAGES;
        len      = ConstantValues.TOTAL_PIXELS;
        testing  = new float[(int)Mathf.Floor(0.2f * total), len];      //4000 (images for testing)
        training = new TrainingData(label);                             //16000 (images for training)

        for (int i = 0; i < total; i++)
        {
            int offset    = i * len;
            int threshold = (int)Mathf.Floor(0.8f * total);               //16000 (images for training)

            //Training
            if (i < threshold)
            {
                for (int p = 0; p < len; ++p)
                {
                    training.data[i, p] = data[offset + p] / 255.0f;
                }

                training.label = label;
                training.data[i, ConstantValues.TOTAL_PIXELS] = label;
            }
            //Testing
            else
            {
                for (int p = 0; p < len; ++p)
                {
                    testing[i - threshold, p] = data[offset + p] / 255.0f;
                }
            }
        }
    }
        public ActionResult LoadFromFile() //one-time function used to load the data from an external file
        {
            return(View());                //early return because the data has already been loaded once

            if (!User.IsInRole("Admin")) //only admin can do this
            {
                return(new HttpUnauthorizedResult("Unauthorized"));
            }
            var fileContents = System.IO.File.ReadAllText(Server.MapPath("~/Content/load.csv"));

            foreach (var line in fileContents.Split('\n'))
            {
                var          arr = line.ToString().Split('~');
                TrainingData tmp = new TrainingData(); //build a new TrainingData object
                if (arr.Length == 2)
                {                                      //update its data members from the file
                    tmp.title = arr[0];
                    tmp.word  = arr[1];
                    db.TrainingDatas.Add(tmp); //add it to the TrainingData table in the DB
                }
            }
            db.SaveChanges();

            return(Content(fileContents));
        }
        public void TrainXor()
        {
            NeuralNetworkBuilder builder = new NeuralNetworkBuilder();
            NeuralNetwork        network = builder.CreateNew()
                                           .AddInputLayer(2, ActivationFunctions.Sigmoid, false)
                                           .AddHiddenLayer(2, ActivationFunctions.Sigmoid, false)
                                           .AddOutputLayer(1, ActivationFunctions.Sigmoid)
                                           .GetNetwork();
            BackPropagationTrainer trainer = new BackPropagationTrainer(network);

            TrainingData[] data = new TrainingData[]
            {
                new TrainingData(new double[] { 1, 1 }, new double[] { 0 }),
                new TrainingData(new double[] { 1, 0 }, new double[] { 1 }),
                new TrainingData(new double[] { 0, 1 }, new double[] { 1 }),
                new TrainingData(new double[] { 0, 0 }, new double[] { 0 }),
            };
            var result = trainer.Train(data, 100000, 0.7, 0.0, 0.005);

            string csv = result.Errors.ToCsvString();

            Assert.IsTrue(result.IsSuccessful, "could not train to the expected error");

            double[] result1 = network.Run(new double[] { 1.0, 1.0 });
            double[] result2 = network.Run(new double[] { 1.0, 0.0 });
            double[] result3 = network.Run(new double[] { 0.0, 1.0 });
            double[] result4 = network.Run(new double[] { 0.0, 0.0 });

            Assert.IsTrue(result1[0].IsEqual(0, 0.0005));
            Assert.IsTrue(result2[0].IsEqual(1, 0.0005));
            Assert.IsTrue(result3[0].IsEqual(1, 0.0005));
            Assert.IsTrue(result4[0].IsEqual(0, 0.0005));
        }
Example #16
        private void CreateTrainAndTestData(double[][] inputSetsLocal, double[][] outputSetsLocal)
        {
            try
            {
                trainData = new TrainingData();
                trainData.SetTrainData(inputSetsLocal, outputSetsLocal);
                trainData.ScaleTrainData(-1.0, 1.0);

                testData       = new TrainingData(trainData);
                testDataOffset = trainData.TrainDataLength / (uint)Configuration.TestDataAmountPerc;
                testData.SubsetTrainData(0, testDataOffset);
                testData.SaveTrain($@"{GetTempPath()}\testdata.dat");
                testSetInput  = testData.Input;
                testSetOutput = testData.Output;

                trainData.SubsetTrainData(testDataOffset, trainData.TrainDataLength - testDataOffset);
                trainData.SaveTrain($@"{GetTempPath()}\traindata.dat");
                trainSetInput  = trainData.Input;
                trainSetOutput = trainData.Output;
            }
            catch (Exception e)
            {
                debug($"Exception '{e.Message}' while working with train data");
                Program.Form.SetStatus($"Exception '{e.Message}' while working with train data");
                ClearAllParameters();
            }
        }
Example #17
        static void Main()
        {
            DataType[] calc_out;
            Console.WriteLine("Creating network.");

            using(NeuralNet net = new NeuralNet("..\\..\\..\\examples\\scaling.net"))
            {
                net.PrintConnections();
                net.PrintParameters();
                Console.WriteLine("Testing network.");
                using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\scaling.data"))
                {
                    for (int i = 0; i < data.TrainDataLength; i++)
                    {
                        net.ResetMSE();
                        net.ScaleInput(data.GetTrainInput((uint)i));
                        calc_out = net.Run(data.GetTrainInput((uint)i));
                        net.DescaleOutput(calc_out);
                        Console.WriteLine("Result {0} original {1} error {2}", calc_out[0], data.OutputAccessor[i][0],
                                          FannAbs(calc_out[0] - data.OutputAccessor[i][0]));
                    }
                        Console.ReadKey();
                }
            }
        }
Example #18
    private static void BuildAddressList(string[] Raw)
    {
        foreach (string Address in Raw)
        {
            TrainingData Data = new TrainingData();

            var Components = Address.Split(',');
            Data.Street = Components[0];
            var Coordinate = Components[1].Split(' ');

            Data.Latitude  = float.Parse(Coordinate[1]);
            Data.Longitude = float.Parse(Coordinate[0]);

            Int32 Id       = 0;
            float Rainfall = 0.0f;
            Int32 Shortest = Int32.MaxValue;
            foreach (var Gauge in Gauges)
            {
                var Temp = CalculateDistance(Data.Latitude, Data.Longitude, Gauge.Latitude, Gauge.Longitude);
                if (Temp < Shortest && Gauge.MeterId != 2300)
                {
                    Shortest = Temp;
                    Id       = Gauge.MeterId;
                    Rainfall = Gauge.RainfallTotal;
                }
            }

            Data.MeterId       = Id;
            Data.MeterDistance = Shortest;
            Data.Rainfall      = Rainfall;
            Sources.Add(Data);
        }
    }
Example #19
        public void Setup(TrainingData trainData, TestData testData)
        {
            this.trainData = trainData;
            this.testData = testData;

            uod = DataFormatter.GetUserOrderRepositories(trainData);
        }
Example #20
 public Form1()
 {
     trainingData = new TrainingData();
     InitializeComponent();
     initializeNeurons();
     initNeuronsPrev();
 }
Example #21
        static void Main()
        {
            const uint num_layers = 3;
            const uint num_neurons_hidden = 96;
            const float desired_error = 0.001F;

            using (TrainingData trainData = new TrainingData("..\\..\\..\\datasets\\robot.train"))
            using (TrainingData testData = new TrainingData("..\\..\\..\\datasets\\robot.test"))
            {
                for (float momentum = 0.0F; momentum < 0.7F; momentum += 0.1F)
                {
                    Console.WriteLine("============= momentum = {0} =============\n", momentum);
                    using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, trainData.InputCount, num_neurons_hidden, trainData.OutputCount))
                    {
                        net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_INCREMENTAL;

                        net.LearningMomentum = momentum;

                        net.TrainOnData(trainData, 20000, 5000, desired_error);

                        Console.WriteLine("MSE error on train data: {0}", net.TestData(trainData));
                        Console.WriteLine("MSE error on test data: {0}", net.TestData(testData));
                    }

                }
            }
            Console.ReadKey();
        }
        private static List <TrainingData <float[], float[]> > GetTrainingData(string imagesPath, string labelsPath)
        {
            var trainingData = new List <TrainingData <float[], float[]> >();

            using (var labels = new BinaryReader(new FileStream(labelsPath, FileMode.Open)))
                using (var images = new BinaryReader(new FileStream(imagesPath, FileMode.Open)))
                {
                    //need to read some unnecessary values because of file format.
                    var magicNumber    = ReadBigInt32(images);
                    var numberOfImages = ReadBigInt32(images);
                    var width          = ReadBigInt32(images);
                    var height         = ReadBigInt32(images);

                    var magicLabel     = ReadBigInt32(labels);
                    var numberOfLabels = ReadBigInt32(labels);

                    for (var imageIndex = 0; imageIndex < numberOfImages; imageIndex++)
                    {
                        var bytes = images.ReadBytes(width * height);

                        var trainingDataItem = new TrainingData <float[], float[]>
                        {
                            //divide by 255 + 1 so the scaled value never reaches exactly 1.
                            Data    = bytes.Select(b => (float)b / (255 + 1)).ToArray(),
                            Targets = ConvertNumberToTargetValues(labels.ReadByte())
                        };

                        trainingData.Add(trainingDataItem);
                    }
                }

            return(trainingData);
        }
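The ReadBigInt32 helper used above is not shown in the snippet. The MNIST/IDX header fields it reads are stored big-endian, so a minimal implementation could look like the following sketch (an assumption, not code from the original source):

        private static int ReadBigInt32(BinaryReader reader)
        {
            // Header fields are big-endian; reverse the bytes before converting
            // (assumes the code runs on a little-endian machine).
            var bytes = reader.ReadBytes(sizeof(int));
            Array.Reverse(bytes);
            return BitConverter.ToInt32(bytes, 0);
        }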
Example #23
    /*-------------------------
    *  Private Methods
    *  -------------------------*/
    private void InverseResult(TrainingData data)
    {
        if (data.outData.left < 0.5f)
        {
            data.outData.left = 1.0f;
        }
        else
        {
            data.outData.left = 0.0f;
        }

        if (data.outData.right < 0.5f)
        {
            data.outData.right = 1.0f;
        }
        else
        {
            data.outData.right = 0.0f;
        }

        if (data.outData.forward < 0.5f)
        {
            data.outData.forward = 1.0f;
        }
        else
        {
            data.outData.forward = 0.0f;
        }
    }
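As a design note (not from the original), each of the three if/else blocks in InverseResult collapses to a single threshold-flipping assignment, for example:

        data.outData.left = data.outData.left < 0.5f ? 1.0f : 0.0f;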
        public IHttpActionResult PutTrainingData(int id, TrainingData trainingData)
        {
            if (!ModelState.IsValid)
            {
                return(BadRequest(ModelState));
            }

            if (id != trainingData.ID)
            {
                return(BadRequest());
            }

            db.Entry(trainingData).State = EntityState.Modified;

            try
            {
                db.SaveChanges();
            }
            catch (DbUpdateConcurrencyException)
            {
                if (!TrainingDataExists(id))
                {
                    return(NotFound());
                }
                else
                {
                    throw;
                }
            }

            return(StatusCode(HttpStatusCode.NoContent));
        }
Example #25
        public void recorrerLectura(List<string> lectura, ref TrainingData td)
        {
            int i;
            //iterate over each line of the txt file to load the training data
            foreach (String linea in lectura)
            {

                for (i = 0; i <= 3; i++)
                {
                    try
                    {
                        if (!linea.Split(';')[i].Equals(""))
                        {
                            cargarNumeroComoArrayInput(Convert.ToInt32(linea.Split(';')[i]), ref td);
                        }
                    }
                    catch (FormatException e)
                    {
                        Console.WriteLine("Input string is not a sequence of digits.");
                    }
                    catch (OverflowException e)
                    {
                        Console.WriteLine("The number cannot fit in an Int32.");
                    }

                    cargarNumeroComoArrayOutput(Convert.ToInt32(linea.Split(';')[4]), ref td);
                }

            }
        }
Example #26
        public LotteryPredictionResult PredictionOneToFile(string webRootPath, string noSite, string noType, TrainingData data, string lotteryCode)
        {
            var    pipeline = new LearningPipeline();
            string dataPath = webRootPath + $"/TrainingGround/{noSite}{noType}.txt";

            pipeline.Add(new TextLoader(dataPath).CreateFrom <TrainingData>(separator: ','));
            pipeline.Add(new Dictionarizer("Label"));
            pipeline.Add(new ColumnConcatenator("Features", TrainingData.GetColumns()));
            pipeline.Add(new LogisticRegressionBinaryClassifier());
            pipeline.Add(new PredictedLabelColumnOriginalValueConverter()
            {
                PredictedLabelColumn = "PredictedLabel"
            });
            _logger.LogInformation("Start PredictionOne :" + lotteryCode + "—" + noSite + noType);
            var               model      = pipeline.Train <TrainingData, LotteryPrediction>();
            var               testData   = new TextLoader(dataPath).CreateFrom <TrainingData>(separator: ',');
            var               evaluator  = new BinaryClassificationEvaluator();
            var               metrics    = evaluator.Evaluate(model, testData);
            TrainingData      newPoint   = data;
            LotteryPrediction prediction = model.Predict(newPoint);
            string            result     = prediction.PredictedLabels;

            _logger.LogInformation("End PredictionOne :" + lotteryCode + "—" + noSite + noType);
            return(new LotteryPredictionResult()
            {
                PredictionType = noType,
                PredictionSite = noSite,
                PredictionResult = result,
                LotteryCode = lotteryCode
            });
        }
        static List <TrainingData <float[], float> > GetNandTrainingData()
        {
            var trainingDataFilePath = $"{Environment.CurrentDirectory}/Resources/NAND_training_data.txt";

            var trainingDataSet = new List <TrainingData <float[], float> >();
            var lines           = File.ReadAllLines(trainingDataFilePath);

            foreach (var line in lines)
            {
                var splitLine = line.Split(" ");

                var firstInput  = float.Parse(splitLine[0]);
                var secondInput = float.Parse(splitLine[1]);
                var target      = float.Parse(splitLine[2]);

                var trainingData = new TrainingData <float[], float>
                {
                    Data    = new[] { firstInput, secondInput },
                    Targets = target
                };

                trainingDataSet.Add(trainingData);
            }

            return(trainingDataSet);
        }
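The parsing loop above expects each line of NAND_training_data.txt to hold two inputs and one target separated by spaces. The file itself is not shown; for a NAND gate its contents would presumably be the four truth-table rows:

        0 0 1
        0 1 1
        1 0 1
        1 1 0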
Example #28
        static void Main()
        {
            DataType[] calc_out;
            Console.WriteLine("Creating network.");

            using (NeuralNet net = new NeuralNet("..\\..\\..\\examples\\scaling.net"))
            {
                net.PrintConnections();
                net.PrintParameters();
                Console.WriteLine("Testing network.");
                using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\scaling.data"))
                {
                    for (int i = 0; i < data.TrainDataLength; i++)
                    {
                        net.ResetMSE();
                        net.ScaleInput(data.GetTrainInput((uint)i));
                        calc_out = net.Run(data.GetTrainInput((uint)i));
                        net.DescaleOutput(calc_out);
                        Console.WriteLine("Result {0} original {1} error {2}", calc_out[0], data.OutputAccessor[i][0],
                                          FannAbs(calc_out[0] - data.OutputAccessor[i][0]));
                    }
                    Console.ReadKey();
                }
            }
        }
Example #29
        ///<summary>
        ///Only for classification, called by Function "CorrectClassified"
        ///</summary>
        public bool evaluateTrainingSet(TrainingData d)
        {
            this.SetInputVector(d.input);
            this.Invalidate();
            List <double> o;

            o = this.GetOutputVector();

            int index = 0;
            int i     = 0;

            //search for the index of the highest value in the output vector
            foreach (double v in o)
            {
                if (o.ElementAt(index) < v)
                {
                    index = i;
                }

                ++i;
            }

            //compare the index to the output vector of the training set;
            //if the value at this index is 1, the classification was correct
            if (d.output.ElementAt(index) == 1.0)
            {
                return(true);
            }
            else
            {
                return(false);
            }
        }
Example #30
        public override void TestInitialize()
        {
            base.TestInitialize();
            coaches   = new TestRepository();
            entries   = new TestTimetableEntriesRepository();
            trainings = new TestTrainingsRepository();
            locations = new TestLocationsRepository();
            types     = new TestTrainingTypesRepository();
            data      = GetRandom.Object <CoachData>();
            var c = new Coach(data);

            coaches.Add(c).GetAwaiter();
            trainingData = GetRandom.Object <TrainingData>();
            var t = new Training(trainingData);

            trainings.Add(t).GetAwaiter();
            trainingTypeData = GetRandom.Object <TrainingTypeData>();
            var tt = new TrainingType(trainingTypeData);

            types.Add(tt).GetAwaiter();
            locationData = GetRandom.Object <LocationData>();
            var l = new Location(locationData);

            locations.Add(l).GetAwaiter();
            obj = new TestClass(coaches, entries, trainings, locations, types);
        }
Example #31
        public void Save(string fileName)
        {
            TrainingData data = new TrainingData();

            data.LayerNumber = 2;

            data.Rows    = new int[grads.Length];
            data.Columns = new int[grads.Length];

            Matrix matrix = affineLayer01.w;

            data.Rows[0]    = matrix.X;
            data.Columns[0] = matrix.Y;
            data.Content.Add(matrix);

            matrix          = affineLayer01.b;
            data.Rows[1]    = matrix.X;
            data.Columns[1] = matrix.Y;
            data.Content.Add(matrix);

            matrix          = affineLayer02.w;
            data.Rows[2]    = matrix.X;
            data.Columns[2] = matrix.Y;
            data.Content.Add(matrix);

            matrix          = affineLayer02.b;
            data.Rows[3]    = matrix.X;
            data.Columns[3] = matrix.Y;
            data.Content.Add(matrix);

            data.SaveAsync(fileName);
        }
Example #32
 static int PrintCallback(NeuralNet net, TrainingData train, uint max_epochs, uint epochs_between_reports,
                          float desired_error, uint epochs, Object user_data)
 {
     Console.WriteLine(String.Format("Epochs     " + String.Format("{0:D}", epochs).PadLeft(8) +
                                     ". Current Error: " + net.MSE));
     return(0);
 }
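A sketch of how a callback with this signature is typically hooked up, mirroring Example #41 below, which calls net.SetCallback(TrainingCallback, "Hello!"); everything else here is an assumption rather than code from this example:

     // Register the callback before training; the last argument is the user_data
     // object handed back to PrintCallback on every reporting epoch.
     net.SetCallback(PrintCallback, "Hello!");
     net.TrainOnData(data, max_epochs, epochs_between_reports, desired_error);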
Example #33
        static void Main()
        {
            const uint num_input = 3;
            const uint num_output = 1;
            const uint num_layers = 4;
            const uint num_neurons_hidden = 5;
            const float desired_error = 0.0001F;
            const uint max_epochs = 5000;
            const uint epochs_between_reports = 1000;
            using(NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, num_input, num_neurons_hidden, num_neurons_hidden, num_output))
            {
                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.LINEAR;
                net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_RPROP;
                using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\scaling.data"))
                {
                    net.SetScalingParams(data, -1, 1, -1, 1);
                    net.ScaleTrain(data);

                    net.TrainOnData(data, max_epochs, epochs_between_reports, desired_error);
                    net.Save("..\\..\\..\\datasets\\scaling.net");

                    Console.ReadKey();
                }
            }
        }
Example #34
        static void Main()
        {
            const uint  num_layers         = 3;
            const uint  num_neurons_hidden = 96;
            const float desired_error      = 0.001F;


            using (TrainingData trainData = new TrainingData("..\\..\\..\\datasets\\robot.train"))
                using (TrainingData testData = new TrainingData("..\\..\\..\\datasets\\robot.test"))
                {
                    for (float momentum = 0.0F; momentum < 0.7F; momentum += 0.1F)
                    {
                        Console.WriteLine("============= momentum = {0} =============\n", momentum);
                        using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, trainData.InputCount, num_neurons_hidden, trainData.OutputCount))
                        {
                            net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_INCREMENTAL;

                            net.LearningMomentum = momentum;

                            net.TrainOnData(trainData, 20000, 5000, desired_error);

                            Console.WriteLine("MSE error on train data: {0}", net.TestData(trainData));
                            Console.WriteLine("MSE error on test data: {0}", net.TestData(testData));
                        }
                    }
                }
            Console.ReadKey();
        }
Example #35
        private void WorkerTraining_DoWork(object sender, DoWorkEventArgs e)
        {
            // Executes when RunWorkerAsync() is called

            List <Training> dataReceived = new TrainingData().GetAllEmployeeTraining(); //change for other all

            e.Result = dataReceived;
        }
Example #36
        public static Training Create(TrainingView v)
        {
            var d = new TrainingData();

            Copy.Members(v, d);

            return(new Training(d));
        }
Example #37
        public static Domain.Training.Training Create(TrainingView view)
        {
            var d = new TrainingData();

            Copy.Members(view, d);

            return(new Domain.Training.Training(d));
        }
Example #38
        static int Main(string[] args)
        {
            int ret = 0;
            #if FANN_FIXED
            using (NeuralNet net = new NeuralNet("..\\..\\..\\examples\\xor_fixed.net"))
            #else
            using (NeuralNet net = new NeuralNet("..\\..\\..\\examples\\xor_float.net"))
            #endif
            {
                net.PrintConnections();
                net.PrintParameters();

                Console.WriteLine("Testing network.");

                using (TrainingData data = new TrainingData())
                {
            #if FANN_FIXED
                    if (!data.ReadTrainFromFile("..\\..\\..\\examples\\xor_fixed.data"))
            #else
                    if (!data.ReadTrainFromFile("..\\..\\..\\examples\\xor.data"))
            #endif
                    {
                        Console.WriteLine("Error reading training data --- ABORTING.\n");
                        return -1;
                    }
                    for (int i = 0; i < data.TrainDataLength; i++)
                    {
                        net.ResetMSE();
                        DataType[] calc_out = net.Test(data.GetTrainInput((uint)i).Array, data.GetTrainOutput((uint)i).Array);
            #if FANN_FIXED
                        Console.WriteLine("XOR test ({0}, {1}) - {2}, should be {3}, difference={4}",
                                            data.InputAccessor[i][0], data.InputAccessor[i][0],
                                            calc_out[0], data.OutputAccessor[i][0],
                                            (float) fann_abs(calc_out[0] - data.OutputAccessor[i][0]) / net.Multiplier);

                        if ((float)fann_abs(calc_out[0] - data.OutputAccessor[i][0]) / net.Multiplier > 0.2)
                        {
                            Console.WriteLine("Test failed");
                            ret = -1;
                        }
            #else

                        Console.WriteLine("XOR test ({0}, {1}) -> {2}, should be {3}, difference={4}",
                            data.GetTrainInput((uint)i)[0],
                            data.GetTrainInput((uint)i)[1],
                            calc_out[0],
                            data.GetTrainOutput((uint)i)[0],
                            calc_out[0] - data.GetTrainOutput((uint)i)[0]);
            #endif

                    }
                    Console.WriteLine("Cleaning up.");
                }
            }
            Console.ReadKey();
            return ret;
        }
Example #39
 public void AttachData(IList<TrainingData> trainingData)
 {
     data = trainingData;
     currentData = data.First(x => x.UserScore == null);
     currentPosition = data.IndexOf(currentData);
     totalData = data.Count;
     progressBar.Maximum = totalData;
     progressBar.Value = currentPosition;
     currentPicture = new Bitmap(currentData.ImagePath).ConvertToGrayScale();
 }
Example #40
        static void Main(string[] args)
        {
            DataType[] calc_out;
            const uint num_input = 2;
            const uint num_output = 1;
            const uint num_layers = 3;
            const uint num_neurons_hidden = 3;
            const float desired_error =  0;
            const uint max_epochs = 1000;
            const uint epochs_between_reports = 10;

            int decimal_point;

            Console.WriteLine("Creating network.");
            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, num_input, num_neurons_hidden, num_output))
            using (TrainingData data = new TrainingData("..\\..\\..\\examples\\xor.data"))
            {
                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.SIGMOID_SYMMETRIC;

                net.TrainStopFunction = StopFunction.STOPFUNC_BIT;
                net.BitFailLimit = 0.01F;

                net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_RPROP;

                net.InitWeights(data);

                Console.WriteLine("Training network.");
                net.TrainOnData(data, max_epochs, epochs_between_reports, desired_error);

                Console.WriteLine("Testing network");
                // Keep a copy of the inputs and outputs so that we don't call TrainingData.Input
                // and TrainingData.Output multiple times causing a copy of all the data on each
                // call. An alternative is to use the Input/OutputAccessors which are fast with
                // repeated calls to get data and can be cast to arrays with the Array property
                DataType[][] input = data.Input;
                DataType[][] output = data.Output;
                for (int i = 0; i < data.TrainDataLength; i++)
                {
                    calc_out = net.Run(input[i]);
                    Console.WriteLine("XOR test ({0},{1}) -> {2}, should be {3}, difference={4}",
                                        input[i][0], input[i][1], calc_out[0], output[i][0],
                                        FannAbs(calc_out[0] - output[i][0]));
                }

                Console.WriteLine("Saving network.\n");

                net.Save("..\\..\\..\\examples\\xor_float.net");

                decimal_point = net.SaveToFixed("..\\..\\..\\examples\\xor_fixed.net");
                data.SaveTrainToFixed("..\\..\\..\\examples\\xor_fixed.data", (uint)decimal_point);

                Console.ReadKey();
            }
        }
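A follow-up to the comment in the example above about the cost of the Input/Output properties: per-row access through GetTrainInput, as used in Example #46, avoids copying the whole data set on every call. A sketch of that variant of the test loop:

                for (int i = 0; i < data.TrainDataLength; i++)
                {
                    // Reads one input row at a time instead of materializing data.Input up front.
                    calc_out = net.Run(data.GetTrainInput((uint)i));
                }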
Example #41
        static void Main()
        {
            const uint num_layers = 3;
            const uint num_neurons_hidden = 96;
            const float desired_error = 0.00007F;

            using (TrainingData trainData = new TrainingData())
            using (TrainingData testData = new TrainingData())
            {
                trainData.CreateTrainFromCallback(374, 48, 3, TrainingDataCallback);
                testData.CreateTrainFromCallback(594, 48, 3, TestDataCallback);

                // Test Accessor classes
                for (int i = 0; i < trainData.TrainDataLength; i++)
                {
                    Console.Write("Input {0}: ", i);
                    for (int j = 0; j < trainData.InputCount; j++)
                    {
                        Console.Write("{0}, ", trainData.InputAccessor[i][j]);
                    }
                    Console.Write("\nOutput {0}: ", i);
                    for (int j = 0; j < trainData.OutputCount; j++)
                    {
                        Console.Write("{0}, ", trainData.OutputAccessor[i][j]);
                    }
                    Console.WriteLine("");
                }

                for (float momentum = 0.0F; momentum < 0.7F; momentum += 0.1F)
                {
                    Console.WriteLine("============= momentum = {0} =============\n", momentum);
                    using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, trainData.InputCount, num_neurons_hidden, trainData.OutputCount))
                    {
                        net.SetCallback(TrainingCallback, "Hello!");

                        net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_INCREMENTAL;

                        net.LearningMomentum = momentum;

                        net.TrainOnData(trainData, 20000, 500, desired_error);

                        Console.WriteLine("MSE error on train data: {0}", net.TestData(trainData));
                        Console.WriteLine("MSE error on test data: {0}", net.TestData(testData));
                    }

                }
            }
            Console.ReadKey();
        }
Example #42
        static void Main()
        {
            const uint num_layers = 3;
            const uint num_neurons_hidden = 32;
            const float desired_error = 0.0001F;
            const uint max_epochs = 300;
            const uint epochs_between_reports = 10;

            Console.WriteLine("Creating network.");
            using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\mushroom.train"))
            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, data.InputCount, num_neurons_hidden, data.OutputCount))
            {
                Console.WriteLine("Training network.");

                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.SIGMOID;

                net.TrainOnData(data, max_epochs, epochs_between_reports, desired_error);

                Console.WriteLine("Testing network.");

                using (TrainingData testData = new TrainingData())
                {
                    testData.ReadTrainFromFile("..\\..\\..\\datasets\\mushroom.test");
                    net.ResetMSE();
                    for (int i = 0; i < testData.TrainDataLength; i++)
                    {
                        // The difference between calling GetTrain[Input|Output] and calling
                        // the Input and Output properties is huge in terms of speed
                        net.Test(testData.GetTrainInput((uint)i).Array, testData.GetTrainOutput((uint)i).Array);
                    }

                    Console.WriteLine("MSE error on test data {0}", net.MSE);

                    Console.WriteLine("Saving network.");

                    net.Save("..\\..\\..\\examples\\mushroom_float.net");

                    Console.ReadKey();
                }

            }
        }
Example #43
        private static void TrainOnSteepnessFile(NeuralNet net, string filename,
            uint max_epochs, uint epochs_between_reports,
            float desired_error, float steepness_start,
            float steepness_step, float steepness_end)
        {
            float error;
            using (TrainingData data = new TrainingData())
            {
                data.ReadTrainFromFile(filename);

                if (epochs_between_reports != 0)
                {
                    Console.WriteLine("Max epochs {0}. Desired error: {1}", max_epochs.ToString("00000000"), desired_error.ToString("0.0000000000"));
                }

                net.ActivationSteepnessHidden = steepness_start;
                net.ActivationSteepnessOutput = steepness_start;
                for (int i = 1; i <= max_epochs; i++)
                {
                    error = net.TrainEpoch(data);

                    if(epochs_between_reports != 0 && (i % epochs_between_reports == 0 || i == max_epochs || i == 1 || error < desired_error))
                    {
                        Console.WriteLine("Epochs     {0}. Current error: {1}", i.ToString("00000000"), error.ToString("0.0000000000"));
                    }

                    if(error < desired_error)
                    {
                        steepness_start += steepness_step;
                        if(steepness_start <= steepness_end)
                        {
                            Console.WriteLine("Steepness: {0}", steepness_start);
                            net.ActivationSteepnessHidden = steepness_start;
                            net.ActivationSteepnessOutput = steepness_start;
                        }
                        else
                        {
                            break;
                        }
                    }
                }
            }
        }
Example #44
        static void Main()
        {
            const uint num_layers = 3;
            const uint num_neurons_hidden = 96;
            const float desired_error = 0.001F;

            Console.WriteLine("Creating network.");

            using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\robot.train"))
            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, data.InputCount, num_neurons_hidden, data.OutputCount))
            using (TrainingData testData = new TrainingData())
            {
                Console.WriteLine("Training network.");

                net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_INCREMENTAL;
                net.LearningMomentum = 0.4F;

                net.TrainOnData(data, 3000, 10, desired_error);

                Console.WriteLine("Testing network.");
                testData.ReadTrainFromFile("..\\..\\..\\datasets\\robot.test");
                try
                {
                    net.ResetMSE();
                    for (int i = 0; i < testData.TrainDataLength; i++)
                    {
                        net.Test(testData.GetTrainInput((uint)i).Array, testData.GetTrainOutput((uint)i).Array);
                    }
                    Console.WriteLine("MSE error on test data: {0}", net.MSE);

                    Console.WriteLine("Saving network.");

                    net.Save("..\\..\\..\\datasets\\robot_float.net");
                }
                catch (Exception e)
                {
                    Console.WriteLine("Exception: {0}", e.Message);
                }
                Console.ReadKey();
            }
        }
Example #45
        static void Main(string[] args)
        {
            TrainingData[] xor = new TrainingData[] {
                new TrainingData(new double[] { 1, 0 }, new double[] { 1 }),
                new TrainingData(new double[] { 0, 1 }, new double[] { 1 }),
                new TrainingData(new double[] { 1, 1 }, new double[] { 0 }),
                new TrainingData(new double[] { 0, 0 }, new double[] { 0 }),
            };

            int[] layers = new int[] { 2, 2, 1 };
            MultilayerPerceptron mlp = new MLPGenerator().Create(layers, 1, new Sigmoid());
            mlp.Inspector = new SignalInspector();

            //show the neuron processing in the VS Output window
            mlp.Inspector.Debug = true;
            mlp.Inspector.TraceLevel = 3;
            mlp.LearnFactor = 0.9;
            var response = mlp.BP(new BPRequest(xor, 1000));

            Console.ReadKey();
        }
Example #46
        static void Main()
        {
            const uint num_input = 2;
            const uint num_output = 1;
            const uint num_layers = 3;
            const uint num_neurons_hidden = 3;
            const float desired_error = 0.001F;
            const uint max_epochs = 500000;
            const uint epochs_between_reports = 1000;
            DataType[] calc_out;

            using (TrainingData data = new TrainingData("..\\..\\..\\examples\\xor.data"))
            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, num_input, num_neurons_hidden, num_output))
            {

                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.SIGMOID_SYMMETRIC;

                net.TrainingAlgorithm = TrainingAlgorithm.TRAIN_QUICKPROP;

                TrainOnSteepnessFile(net, "..\\..\\..\\examples\\xor.data", max_epochs, epochs_between_reports, desired_error, 1.0F, 0.1F, 20.0F);

                net.ActivationFunctionHidden = ActivationFunction.THRESHOLD_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.THRESHOLD_SYMMETRIC;

                for(int i = 0; i != data.TrainDataLength; i++)
                {
                    calc_out = net.Run(data.GetTrainInput((uint)i));
                    Console.WriteLine("XOR test ({0}, {1}) -> {2}, should be {3}, difference={4}",
                                        data.InputAccessor[i][0], data.InputAccessor[i][1], calc_out[0], data.OutputAccessor[i][0],
                                        FannAbs(calc_out[0] - data.OutputAccessor[i][0]));
                }

                net.Save("..\\..\\..\\examples\\xor_float.net");

                Console.ReadKey();
            }
        }
Example #47
 static int PrintCallback(NeuralNet net, TrainingData train, uint max_epochs, uint epochs_between_reports, float desired_error, uint epochs, Object user_data)
 {
     Console.WriteLine(String.Format("Epochs     " + String.Format("{0:D}", epochs).PadLeft(8) + ". Current Error: " +
                       String.Format("{0:F}", net.MSE).PadRight(8)));
     return 0;
 }
        public TrainingData OnTraining()
        {
            if (termeltEgysegekSzama == 4 && playerCount == 2) return null;
            if (playerCount <= 3 && myInfo.Money < 100 && !myInfo.Researched.Contains("írás")) return null;
            if (myUnits.Count > 20) return null;
            if (myInfo.Money >= 50)
            {
                TrainingData cmd = new TrainingData();
                cmd.UnitTypeName = "talpas";
                cmd.PositionX = myCities.ElementAt(0).PositionX;
                cmd.PositionY = myCities.ElementAt(0).PositionY;
                myInfo.Money -= 50;
                return cmd;
            }

            return null;
        }
        // Test function that demonstrates usage of the fann C++ wrapper
        private static void xor_test()
        {
            System.Console.WriteLine("XOR test started.");

            const float LearningRate = 0.7f;
            const uint numInput = 2;
            const uint numHidden = 3;
            const uint numOutput = 1;
            const float desired_error = 0;
            const uint max_iterations = 1000;
            const uint iterations_between_reports = 10;

            System.Console.WriteLine("Creating network.");

            NeuralNet net = new NeuralNet();

            List<uint> layers = new List<uint>();
            layers.Add(numInput);
            layers.Add(numHidden);
            layers.Add(numOutput);

            net.CreateStandardArray(layers.ToArray());

            net.SetLearningRate(LearningRate);

            net.SetActivationSteepnessHidden(1.0);
            net.SetActivationSteepnessOutput(1.0);

            net.SetActivationFunctionHidden(ActivationFunction.SigmoidSymmetric);
            net.SetActivationFunctionOutput(ActivationFunction.SigmoidSymmetric);

            net.SetTrainStopFunction(StopFunction.Bit);
            net.SetBitFailLimit(0.01f);
            // Set additional properties such as the training algorithm
            //net.SetTrainingAlgorithm(FANN::TRAIN_QuickProp);

            // Output network type and parameters
            System.Console.WriteLine("Network Type                         :  ");
            switch (net.GetNetworkType())
            {
                case NetworkType.Layer:
                    System.Console.WriteLine("LAYER");
                    break;
                case NetworkType.ShortCut:
                    System.Console.WriteLine("SHORTCUT");
                    break;
                default:
                    System.Console.WriteLine("UNKNOWN");
                    break;
            }
            net.PrintParameters();

            System.Console.WriteLine("Training network.");

            TrainingData data = new TrainingData();
            bool d = data.ReadTrainFromFile("xor.data");
            if (d)
            {
                // Initialize and train the network with the data
                net.InitWeights(data);

                System.Console.WriteLine("Max Epochs " + max_iterations + ". "
                    + "Desired Error: " + desired_error);

                net.Callback += (nn, train, max_epochs, epochs_between_reports, de, epochs)
                    =>
                    {
                        System.Console.WriteLine("Epochs     " + epochs + ". " + "Current Error: " + nn.GetMSE() + "\n");
                        return 0;
                    };

                net.TrainOnData(data, max_iterations,
                    iterations_between_reports, desired_error);

                System.Console.WriteLine("Testing network.");

                for (uint i = 0; i < data.TrainingDataLength; ++i)
                {
                    // Run the network on the test data
                    double calcOut = net.Run(data.Input[i])[0];

                    System.Console.WriteLine("XOR test (" + data.Input[i][0] + ", "
                         + data.Input[i][1] + ") -> " + calcOut
                         + ", should be " + data.Output[i][0] + ", "
                         + "difference = "
                         + Math.Abs(calcOut - data.Output[i][0]));
                }

                System.Console.WriteLine("Saving network.");

                // Save the network in floating point and fixed point
                net.Save("xor_float.net");
                uint decimal_point = (uint)net.SaveToFixed("xor_fixed.net");
                data.SaveTrainToFixed("xor_fixed.data", decimal_point);

                System.Console.WriteLine("XOR test completed.");

            }
            else
                Console.WriteLine("Training file not found");
        }
        /// <summary>
        /// Creates and trains the neural networks.
        /// </summary>
        /// <param name="path">Relative folder in which the trained networks are saved.</param>
        /// <param name="trainDataFolder">Folder containing the training data files.</param>
        /// <param name="testDataFolder">Folder containing the test data files.</param>
        /// <param name="maxMaxEpochs">Maximum number of epochs.</param>
        /// <param name="minMaxEpochs">Minimum number of epochs.</param>
        /// <param name="maxEpochsMultiplierStep">Multiplicative step applied to the epoch count for each successive network variant.</param>
        /// <param name="minHiddenLayersMultiplier">Minimum multiplier for the hidden layers.</param>
        /// <param name="maxHiddenLayersMultiplier">Maximum multiplier for the hidden layers.</param>
        /// <param name="hiddenLayersMultiplierStep">Additive step for the hidden-layer multiplier.</param>
        /// <param name="methodProgressPart">Value (0 - 100) specifying what share of the initialization progress belongs to this method.</param>
        /// <param name="desiredMSE">Target MSE; if an already saved network beats it, no new networks are trained.</param>
        private void CreateNetworks(
            string path,
            string trainDataFolder,
            string testDataFolder,
            int maxMaxEpochs,
            int minMaxEpochs,
            double maxEpochsMultiplierStep,
            double minHiddenLayersMultiplier,
            double maxHiddenLayersMultiplier,
            double hiddenLayersMultiplierStep,
            double methodProgressPart,
            double desiredMSE
            )
        {
            double methodStartProgress = this.InitializationProgress;
            List<TrainingDataFileParameters> fileList = DataProvider.Instance.GetTrainingDataFilesParameters(trainDataFolder);
            List<NeuralNetworkParameters> parameters = new List<NeuralNetworkParameters>();
            foreach (TrainingDataFileParameters file in fileList)
            {
                for (double i = minMaxEpochs; i <= maxMaxEpochs; i *= maxEpochsMultiplierStep)
                {
                    for (double j = minHiddenLayersMultiplier; j <= maxHiddenLayersMultiplier; j += hiddenLayersMultiplierStep)
                    {
                        parameters.Add(new NeuralNetworkParameters()
                        {
                            fileParameters = file,
                            hiddenLayersMultiplier = j,
                            maxEpochs = (uint)i
                        });
                    }
                }
            }

            int numberOfNetworksToCreate = parameters.Count;
            int numberOfCreatedNetworks = 0;
            NetworkMSE lowestNetworkMSE = null;

            try
            {
                StringReader reader = new StringReader(File.ReadAllText(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) + "\\" + path + "LowestMSENetwork.xml"));
                XmlSerializer serializer = new XmlSerializer(typeof(NetworkMSE));
                lowestNetworkMSE = (NetworkMSE)serializer.Deserialize(reader);
            }
            catch
            {
                lowestNetworkMSE = null;
            }

            // If a previously saved network already meets the desired MSE, there is nothing left to train.
            if (lowestNetworkMSE != null && lowestNetworkMSE.MSE < desiredMSE)
            {
                this.InitializationProgress = 100;
                return;
            }

            string[] testFiles = Directory.GetFiles(testDataFolder);
            List<TrainingData> testDataList = new List<TrainingData>();
            foreach (string testFile in testFiles)
            {
                TrainingData td = new TrainingData();
                //if (td.ReadTrainFromFile(testDataFolder + "\\" + Path.GetFileName(testFile)))
                if (td.ReadTrainFromFile(testFile))
                {
                    testDataList.Add(td);
                }
            }

            List<TrainingData> trainingDataList = new List<TrainingData>();
            foreach (TrainingDataFileParameters file in fileList)
            {
                TrainingData td = new TrainingData();
                if (td.ReadTrainFromFile(trainDataFolder.Split('\\').Last() + "\\" + file.FileName))
                {
                    trainingDataList.Add(td);
                }
            }

            string initStatus = this.InitializationStatus;
            Directory.CreateDirectory(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) + "\\" + path);
            List<Task> taskList = new List<Task>();
            NeuralNet.CallbackType[] callbacksArray = new NeuralNet.CallbackType[this.MaxComputingThreads];
            Semaphore threadProgressSemaphore = new Semaphore(1, 1);
            Semaphore allSem = new Semaphore(1, 1);
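            // allSem serialises access to the shared parameter queue, the best-MSE record and the
            // progress counters; it is released around TrainOnData so the workers can train in parallel.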
            TrainingData[] threadDataVars = new TrainingData[this.MaxComputingThreads];
            for (int i = 1; i <= this.MaxComputingThreads; i++)
            {
                int taskNumber = i;
                threadDataVars[i - 1] = new TrainingData();

                Task t = new Task((Action)(
                    () =>
                    {
                        while (true)
                        {
                            allSem.WaitOne();
                            if (parameters.Count == 0)
                            {
                                this.InitializationStatus = initStatus + " " + numberOfNetworksToCreate.ToString() + " / " + numberOfNetworksToCreate.ToString();
                                this.InitializationProgress = 100;
                                allSem.Release(); // release the lock before exiting, otherwise the remaining worker tasks deadlock on allSem
                                break;
                            }
                            else
                            {
                                NeuralNetworkParameters usedParameters = parameters.First();
                                parameters.RemoveAt(0);
                                if (!File.Exists(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) + "\\" + path + usedParameters.FileName))
                                {
                                    NeuralNet net = new NeuralNet();
                                    List<uint> layers = new List<uint>();
                                    layers.Add((uint)((usedParameters.fileParameters.NumberOfPeriods * 2) - 1)); // inputs
                                    layers.Add((uint)(layers[0] * usedParameters.hiddenLayersMultiplier)); // hidden
                                    layers.Add(3); // output
                                    net.CreateStandardArray(layers.ToArray());
                                    net.SetLearningRate((float)0.7);
                                    net.SetActivationFunctionHidden(ActivationFunction.SigmoidSymmetric);
                                    net.SetActivationFunctionOutput(ActivationFunction.SigmoidSymmetric);

                                    net.Callback += callbacksArray[taskNumber - 1];
                                    threadDataVars[taskNumber - 1] = trainingDataList.Find((e) => ((e.NumInputTrainData == layers[0]) && (e.Input.Length == usedParameters.fileParameters.NumberOfPatterns)));
                                    allSem.Release();
                                    net.TrainOnData(threadDataVars[taskNumber - 1],
                                            usedParameters.maxEpochs, // max iterations
                                            0,// iterations between report
                                            0 //desired error
                                            );
                                    allSem.WaitOne();
                                    net.TestData(testDataList.Find((e) => e.NumInputTrainData == layers[0]));
                                    double mse = net.GetMSE();
                                    if (lowestNetworkMSE == null || lowestNetworkMSE.MSE > mse)
                                    {
                                        lowestNetworkMSE = new NetworkMSE()
                                        {
                                            MSE = mse,
                                            NetworkFileName = usedParameters.FileName
                                        };

                                        StringWriter writer = new StringWriter();

                                        XmlSerializer serializer = new XmlSerializer(typeof(NetworkMSE));
                                        serializer.Serialize(writer, lowestNetworkMSE);
                                        File.WriteAllText(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) + "\\" + path + "LowestMSENetwork.xml", writer.ToString());
                                    }

                                    net.Save(path + usedParameters.FileName);
                                }
                                numberOfCreatedNetworks++;
                                this.InitializationStatus = initStatus + " " + numberOfCreatedNetworks.ToString() + " / " + numberOfNetworksToCreate.ToString();
                                this.InitializationProgress = (numberOfCreatedNetworks * methodProgressPart / numberOfNetworksToCreate) + methodStartProgress;

                            }
                            allSem.Release();
                        }
                    }
                    ), TaskCreationOptions.LongRunning
                    );
                taskList.Add(t);
            }

            foreach (Task t in taskList)
            {
                t.Start();
            }
        }
Example #51
        static void Main()
        {
            const float desired_error = 0.0F;
            uint max_neurons = 30;
            uint neurons_between_reports = 1;
            uint bit_fail_train, bit_fail_test;
            float mse_train, mse_test;
            DataType[] output;
            DataType[] steepness = new DataType[1];
            int multi = 0;
            ActivationFunction[] activation = new ActivationFunction[1];
            TrainingAlgorithm training_algorithm = TrainingAlgorithm.TRAIN_RPROP;

            Console.WriteLine("Reading data.");

            using (TrainingData trainData = new TrainingData("..\\..\\..\\datasets\\parity8.train"))
            using (TrainingData testData = new TrainingData("..\\..\\..\\datasets\\parity8.test"))
            {
                trainData.ScaleTrainData(-1, 1);
                testData.ScaleTrainData(-1, 1);

                Console.WriteLine("Creating network.");

                using (NeuralNet net = new NeuralNet(NetworkType.SHORTCUT, 2, trainData.InputCount, trainData.OutputCount))
                {
                    net.TrainingAlgorithm = training_algorithm;
                    net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                    net.ActivationFunctionOutput = ActivationFunction.LINEAR;
                    net.TrainErrorFunction = ErrorFunction.ERRORFUNC_LINEAR;

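                    // Single-activation mode: offer the cascade algorithm one candidate steepness and one
                    // activation function, but evaluate 8 identical candidate groups in parallel.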
                    if (multi == 0)
                    {
                        steepness[0] = 1;
                        net.CascadeActivationSteepnesses = steepness;

                        activation[0] = ActivationFunction.SIGMOID_SYMMETRIC;

                        net.CascadeActivationFunctions = activation;
                        net.CascadeCandidateGroupsCount = 8;
                    }

                    if (training_algorithm == TrainingAlgorithm.TRAIN_QUICKPROP)
                    {
                        net.LearningRate = 0.35F;
                        net.RandomizeWeights(-2.0F, 2.0F);
                    }

                    net.BitFailLimit = (DataType)0.9;
                    net.TrainStopFunction = StopFunction.STOPFUNC_BIT;
                    net.PrintParameters();

                    net.Save("..\\..\\..\\examples\\cascade_train2.net");

                    Console.WriteLine("Training network.");

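                    // Cascade training grows the shortcut network one candidate neuron at a time until
                    // max_neurons is reached or the stop criterion (bit fail) is satisfied.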
                    net.CascadetrainOnData(trainData, max_neurons, neurons_between_reports, desired_error);

                    net.PrintConnections();

                    mse_train = net.TestData(trainData);
                    bit_fail_train = net.BitFail;
                    mse_test = net.TestData(testData);
                    bit_fail_test = net.BitFail;

                    Console.WriteLine("\nTrain error: {0}, Train bit-fail: {1}, Test error: {2}, Test bit-fail: {3}\n",
                                      mse_train, bit_fail_train, mse_test, bit_fail_test);

                    for (int i = 0; i < trainData.TrainDataLength; i++)
                    {
                        output = net.Run(trainData.GetTrainInput((uint)i));
                        if ((trainData.GetTrainOutput((uint)i)[0] >= 0 && output[0] <= 0) ||
                            (trainData.GetTrainOutput((uint)i)[0] <= 0 && output[0] >= 0))
                        {
                            Console.WriteLine("ERROR: {0} does not match {1}", trainData.GetTrainOutput((uint)i)[0], output[0]);
                        }
                    }

                    Console.WriteLine("Saving network.");
                    net.Save("..\\..\\..\\examples\\cascade_train.net");

                    Console.ReadKey();
                }
            }
        }
Example #52
        static void Main()
        {
            const uint num_layers = 3;
            const uint num_neurons_hidden = 32;
            const float desired_error = 0.0001F;
            const uint max_epochs = 300;
            const uint epochs_between_reports = 10;
            long before;

            Console.WriteLine("Creating network.");
            using (TrainingData data = new TrainingData("..\\..\\..\\datasets\\mushroom.train"))
            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, data.InputCount, num_neurons_hidden, data.OutputCount))
            {
                Console.WriteLine("Training network.");

                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
                net.ActivationFunctionOutput = ActivationFunction.SIGMOID;

                net.TrainOnData(data, max_epochs, epochs_between_reports, desired_error);

                Console.WriteLine("Testing network.");

                using (TrainingData testData = new TrainingData())
                {
                    testData.ReadTrainFromFile("..\\..\\..\\datasets\\mushroom.test");
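                    // Compare per-element access through the accessor objects with access through the
                    // copied Input/Output arrays by timing the same five test patterns on each path.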
                    before = Environment.TickCount;
                    for (int i = 0; i < 5; i++)
                    {
                        DataType[] input = new DataType[testData.InputCount];
                        DataType[] output = new DataType[testData.OutputCount];
                        for(int j = 0; j < testData.InputCount; j++)
                        {
                            input[j] = testData.InputAccessor[i][j];
                        }
                        for (int j = 0; j < testData.OutputCount; j++)
                        {
                            output[j] = testData.OutputAccessor[i][j];
                        }
                        net.Test(input, output);
                    }
                    Console.WriteLine("Data Accessor ticks {0}", Environment.TickCount - before);

                    before = Environment.TickCount;
                    for (int i = 0; i < 5; i++)
                    {
                        DataType[] input = new DataType[testData.InputCount];
                        DataType[] output = new DataType[testData.OutputCount];
                        for (int j = 0; j < testData.InputCount; j++)
                        {
                            input[j] = testData.Input[i][j];
                        }
                        for (int j = 0; j < testData.OutputCount; j++)
                        {
                            output[j] = testData.Output[i][j];
                        }
                        net.Test(input, output);
                    }
                    Console.WriteLine("Array ticks {0}", Environment.TickCount - before);
                }
                Console.WriteLine("MSE error on test data {0}", net.MSE);

                Console.WriteLine("Saving network.");

                net.Save("..\\..\\..\\examples\\mushroom_float.net");

                Console.ReadKey();
            }
        }
Example #53
        private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
        {
            words2pos = new Dictionary<string, long>();
            wordsOrder = new List<string>();
            outputValues = new Dictionary<string, long>();
            totalRows = 0;
            Invoke(new MethodInvoker(
                 delegate
                 {
                     button1.Enabled = false;
                     tabControl1.Enabled = false;
                 }
                 ));

            Tuple<string, string> arg = (Tuple<string, string>)e.Argument;

            OleDbConnection myOleDbConnection = null;
            OleDbCommand myOleDbCommand = null;
            OleDbDataReader myOleDbDataReader = null;
            StreamWriter outputFile = null;
            StreamWriter dictOutputFile = null;
            StreamWriter catOutputFile = null;

            try
            {
                string connectionString = "provider=Microsoft.Jet.OLEDB.4.0;data source=" + arg.Item1;
                myOleDbConnection = new OleDbConnection(connectionString);
                myOleDbCommand = myOleDbConnection.CreateCommand();

                // TOP 100
                myOleDbCommand.CommandText = "SELECT TOP " + (int)numericUpDown3.Value + " Recept, Content, ReceptDescription,DishType FROM tblMain";
                myOleDbConnection.Open();
                myOleDbDataReader = myOleDbCommand.ExecuteReader();
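                // First pass over the table: build the word dictionary and the set of output categories
                // before any training patterns are written.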
                int i = 0;
                while (myOleDbDataReader.Read())
                {
                    i++;
                    prepareTrainingData((string)myOleDbDataReader["Content"], (string)myOleDbDataReader["DishType"], 1, null);
                    if (i % 541 == 0)
                    {
                        report("First pass: "******"SELECT TOP " + (int)numericUpDown3.Value + " Recept, Content, ReceptDescription,DishType FROM tblMain";
                myOleDbDataReader = myOleDbCommand.ExecuteReader();
                outputFile = new StreamWriter(File.OpenWrite(arg.Item2));
                outputFile.WriteLine(totalRows + " " + wordsOrder.Count + " " + outputValues.Count);
                i = 0;
                while (myOleDbDataReader.Read())
                {
                    i++;
                    prepareTrainingData((string)myOleDbDataReader["Content"], (string)myOleDbDataReader["DishType"], 2, outputFile);
                    if (i % 541 == 0)
                    {
                        report("Second pass: "******"Dict and cat dump");
                dictOutputFile = new StreamWriter(File.OpenWrite(arg.Item2 + ".words.dict"));
                foreach (string word in wordsOrder)
                {
                    dictOutputFile.WriteLine(word);
                }
                catOutputFile = new StreamWriter(File.OpenWrite(arg.Item2 + ".words.cat"));
                foreach (string val in outputValues.OrderBy(x => x.Value).Select(x => x.Key))
                {
                    catOutputFile.WriteLine(val);
                }

                report("Creating network");
                NeuralNet net = new NeuralNet();
                net.SetActivationFunctionHidden(ActivationFunction.SigmoidSymmetric);
                net.Callback += new NeuralNet.CallbackType(fannProgress);
                uint[] layers = textBox5.Text.Split(new char[] { ',' }).Select(x => UInt32.Parse(x.Trim())).ToArray();
                net.CreateStandardArray(layers);

                TrainingData data = new TrainingData();
                outputFile.Close();
                report("Reading data");
                data.ReadTrainFromFile(arg.Item2);
                report("Doing training");
                net.TrainOnData(data, (uint)numericUpDown1.Value, 10, (float)numericUpDown2.Value);

                net.Save(arg.Item2 + ".ann");
                report("Done training. Saved.");
            }
            finally
            {
                if (myOleDbDataReader != null)
                    myOleDbDataReader.Close();
                if (myOleDbCommand != null)
                    myOleDbCommand.Cancel();
                if (myOleDbConnection != null)
                    myOleDbConnection.Close();
                if (outputFile != null)
                    outputFile.Close();
                if (dictOutputFile != null)
                    dictOutputFile.Close();
                if (catOutputFile != null)
                    catOutputFile.Close();
            }
        }
Example #54
        // Takes the number of a data item, encodes it as a 9-element array and appends it to the training data outputs.
        protected void cargarNumeroComoArrayOutput(int number,ref TrainingData td)
        {
            PatternProcessingHelper patternHelper = new PatternProcessingHelper();
            ArrayList arr = patternHelper.ArrayListFromNumber(number, 9);
            int i;

            for (i = 0; i <= 8; i++)
            {
                td.Outputs.Add(arr[i]);
            }
        }
        public TrainingData OnTraining()
        {
            if (turn > 70) return null;
            if (myUnits.Count > myCities.Count * 2 && turn > 20) return null;
            if (myInfo.Money >= 50 && myUnits.Count < 1)        // train first unit
            {
                CityInfo city = myCities.ElementAt(0);
                TrainingData cmd = new TrainingData();
                cmd.PositionX = city.PositionX;
                cmd.PositionY = city.PositionY;
                cmd.UnitTypeName = (myInfo.Researched.Contains("íjászat")) ? "íjász" : "talpas";
                myInfo.Money -= 50;
                myUnits.Add(new UnitInfo() { MovementPoints = 2, Owner = PlayerName, PositionX = city.PositionX, PositionY = city.PositionY, UnitTypeName = "talpas" });
                return cmd;
            }

            if (myInfo.Money >= 100)
            {
                int varos = -1;
                for (int i = 0; i < myCities.Count; i++)
                {
                    int unitCount = 0;
                    CityInfo myCity = myCities.ElementAt(i);
                    for (int j = 0; j < myUnits.Count; j++)
                    {
                        if (myUnits.ElementAt(j).PositionX == myCity.PositionX && myUnits.ElementAt(j).PositionY == myCity.PositionY) unitCount++;
                    }

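                    // Count enemy units in the cells around the city (coordinates clamped to the map bounds).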
                    int surrounding = 0;
                    for (int x = Math.Max(0, myCity.PositionX - 3); x < Math.Min(14, myCity.PositionX + 3); x++)
                    {
                        for (int y = Math.Max(0, myCity.PositionY - 3); y < Math.Min(14, myCity.PositionY + 3); y++)
                        {
                            int enemyCount = 0;
                            for (int j = 0; j < enemyUnits.Count; j++)
                            {
                                UnitInfo enemy = enemyUnits.ElementAt(j);
                                if (enemy.PositionX == x && enemy.PositionY == y) enemyCount++;
                            }
                            surrounding += enemyCount;
                        }
                    }

                    if (unitCount < Math.Max(2, surrounding))
                    {
                        varos = i;
                        break;
                    }
                }

                if (varos == -1)
                    return null;

                CityInfo city = myCities.ElementAt(varos);
                TrainingData cmd = new TrainingData();
                cmd.PositionX = city.PositionX;
                cmd.PositionY = city.PositionY;
                cmd.UnitTypeName = "lovag";
                myInfo.Money -= 100;
                myUnits.Add(new UnitInfo() { MovementPoints = 2, Owner = PlayerName, PositionX = city.PositionX, PositionY = city.PositionY, UnitTypeName = "lovag" });
                int lovagCtr = 0;
                for (int i = 0; i < myUnits.Count; i++)
                {
                    if (myUnits.ElementAt(i).UnitTypeName == "lovag") lovagCtr++;
                }
                return cmd;
            }

            if (myInfo.Money >= 50)
            {
                int varos = -1;
                for (int i = 0; i < myCities.Count; i++)
                {
                    int unitCount = 0;
                    CityInfo myCity = myCities.ElementAt(i);
                    for (int j = 0; j < myUnits.Count; j++)
                    {
                        if (myUnits.ElementAt(j).PositionX == myCity.PositionX && myUnits.ElementAt(j).PositionY == myCity.PositionY) unitCount++;
                    }

                    int surrounding = 0;
                    for (int x = Math.Max(0, myCity.PositionX - 3); x < Math.Min(14, myCity.PositionX + 3); x++)
                    {
                        for (int y = Math.Max(0, myCity.PositionY - 3); y < Math.Min(14, myCity.PositionY + 3); y++)
                        {
                            int enemyCount = 0;
                            for (int j = 0; j < enemyUnits.Count; j++)
                            {
                                UnitInfo enemy = enemyUnits.ElementAt(j);
                                if (enemy.PositionX == x && enemy.PositionY == y) enemyCount++;
                            }
                            surrounding += enemyCount;
                        }
                    }

                    if (unitCount < Math.Max(2,surrounding))
                    {
                        varos = i;
                        break;
                    }
                }

                if (varos == -1)
                    return null;

                CityInfo city = myCities.ElementAt(varos);
                TrainingData cmd = new TrainingData();
                cmd.PositionX = city.PositionX;
                cmd.PositionY = city.PositionY;
                cmd.UnitTypeName = (myInfo.Researched.Contains("íjászat")) ? "íjász" : "talpas";
                myInfo.Money -= 50;
                myUnits.Add(new UnitInfo() { MovementPoints = 2, Owner = PlayerName, PositionX = city.PositionX, PositionY = city.PositionY, UnitTypeName = "talpas" });
                return cmd;
            }
            return null;
        }
Example #56
        static void XorTest()
        {
            Console.WriteLine("\nXOR test started.");

            const float learning_rate = 0.7f;
            const uint num_layers = 3;
            const uint num_input = 2;
            const uint num_hidden = 3;
            const uint num_output = 1;
            const float desired_error = 0.001f;
            const uint max_iterations = 300000;
            const uint iterations_between_reports = 1000;

            Console.WriteLine("\nCreating network.");

            using (NeuralNet net = new NeuralNet(NetworkType.LAYER, num_layers, num_input, num_hidden, num_output))
            {
                net.LearningRate = learning_rate;

                net.ActivationSteepnessHidden = 1.0F;
                net.ActivationSteepnessOutput = 1.0F;

                net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC_STEPWISE;
                net.ActivationFunctionOutput = ActivationFunction.SIGMOID_SYMMETRIC_STEPWISE;

                // Output network type and parameters
                Console.Write("\nNetworkType                         :  ");
                switch (net.NetworkType)
                {
                    case NetworkType.LAYER:
                        Console.WriteLine("LAYER");
                        break;
                    case NetworkType.SHORTCUT:
                        Console.WriteLine("SHORTCUT");
                        break;
                    default:
                        Console.WriteLine("UNKNOWN");
                        break;
                }
                net.PrintParameters();

                Console.WriteLine("\nTraining network.");

                using (TrainingData data = new TrainingData())
                {
                    if (data.ReadTrainFromFile("..\\..\\..\\examples\\xor.data"))
                    {
                        // Initialize and train the network with the data
                        net.InitWeights(data);

                        Console.WriteLine("Max Epochs " + String.Format("{0:D}", max_iterations).PadLeft(8) + ". Desired Error: " + String.Format("{0:F}", desired_error).PadRight(8));
                        net.SetCallback(PrintCallback, null);
                        net.TrainOnData(data, max_iterations, iterations_between_reports, desired_error);

                        Console.WriteLine("\nTesting network.");

                        for (uint i = 0; i < data.TrainDataLength; i++)
                        {
                            // Run the network on the test data
                            DataType[] calc_out = net.Run(data.Input[i]);

                            Console.WriteLine("XOR test ({0}, {1}) -> {2}, should be {3}, difference = {4}",
                                data.InputAccessor[(int)i][0].ToString("+#;-#"),
                                data.InputAccessor[(int)i][1].ToString("+#;-#"),
                                calc_out[0] == 0 ? 0.ToString() : calc_out[0].ToString("+#.#####;-#.#####"),
                                data.OutputAccessor[(int)i][0].ToString("+#;-#"),
                                FannAbs(calc_out[0] - data.Output[i][0]));
                        }

                        Console.WriteLine("\nSaving network.");

                        // Save the network in floating point and fixed point
                        net.Save("..\\..\\..\\examples\\xor_float.net");
                        uint decimal_point = (uint)net.SaveToFixed("..\\..\\..\\examples\\xor_fixed.net");
                        data.SaveTrainToFixed("..\\..\\..\\examples\\xor_fixed.data", decimal_point);

                        Console.WriteLine("\nXOR test completed.");
                    }
                }
            }
        }
Example #57
 static int TrainingCallback(NeuralNet net, TrainingData data, uint maxEpochs, uint epochsBetweenReports, float desiredError, uint epochs, object userData)
 {
     System.GC.Collect(); // Make sure nothing's getting garbage-collected prematurely
     GC.WaitForPendingFinalizers();
     Console.WriteLine("Callback: Last neuron weight: {0}, Last data input: {1}, Max epochs: {2}\nEpochs between reports: {3}, Desired error: {4}, Current epoch: {5}\nGreeting: \"{6}\"",
                         net.ConnectionArray[net.TotalConnections - 1].Weight, data.InputAccessor.Get((int)data.TrainDataLength - 1, (int)data.InputCount - 1),
                         maxEpochs, epochsBetweenReports, desiredError, epochs, userData);
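     // A non-negative return value lets training continue; FANN aborts training when the callback returns -1.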
     return 1;
 }
Example #58
        private void button4_Click(object sender, EventArgs e)
        {
            int fileCount = 0;

            for (int i = 1; i < 10; i++)
            {
                DirectoryInfo di = new DirectoryInfo(@"TrainNeural\" + i.ToString());
                FileInfo[] bmpFiles = di.GetFiles("*.png");
                fileCount += bmpFiles.Length;
            }

            double[,] input = new double[fileCount, 480];
            double[,] output = new double[fileCount, 10];

            List<string> hashes = new List<string>();
            int count = 0;

            for (int i = 1; i < 10; i++)
            {
                DirectoryInfo di = new DirectoryInfo(@"TrainNeural\" + i.ToString());
                FileInfo[] bmpFiles = di.GetFiles("*.png");

                foreach (FileInfo fi in bmpFiles)
                {
                    string hash = FileHashSum(fi.FullName);

                    if (hashes.Contains(hash))
                        continue;

                    hashes.Add(hash);

                    Bitmap bmp = new Bitmap(fi.FullName);

                    bmp = ImageProcessor.Binarization(bmp, ImageProcessor.OtsuThreshold(bmp));
                    ImageProcessor.GetNumericView(bmp, ref input, count);

                    for (int j = 1; j < 10; j++)
                    {
                        if (j == i)
                            output[count, j - 1] = 1;
                        else
                            output[count, j - 1] = 0;
                    }

                    count++;
                }
            }

            if (File.Exists("TrainingData.tr"))
                File.Delete("TrainingData.tr");

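            // FANN training-file header: number of patterns, inputs per pattern (480) and outputs per pattern (9).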
            string fillTrainFile = count.ToString() + " 480 9" + Environment.NewLine;

            for (int i = 0; i < count; i++)
            {
                for (int x = 0; x < 480; x++)
                {
                    fillTrainFile += input[i, x].ToString();
                    if (x < 479)
                        fillTrainFile += " ";
                    else
                        fillTrainFile += Environment.NewLine;
                }

                for (int x = 0; x < 9; x++)
                {
                    fillTrainFile += output[i, x].ToString();
                    if (x < 8)
                       fillTrainFile += " ";
                    else
                        fillTrainFile += Environment.NewLine;
                }

                if (i % 40 == 0)
                {
                    File.AppendAllText("TrainingData.tr", fillTrainFile);
                    fillTrainFile = String.Empty;
                }
            }

            File.AppendAllText("TrainingData.tr", fillTrainFile);

            NeuralNet neuralNet = new NeuralNet();

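            // 480 inputs (the features produced by ImageProcessor.GetNumericView), one hidden layer of 190 neurons,
            // and 9 outputs, one per digit class 1-9.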
            uint[] layers = { 480, 190, 9 };
            neuralNet.CreateStandardArray(layers);

            neuralNet.RandomizeWeights(-0.1, 0.1);
            neuralNet.SetLearningRate(0.7f);

            TrainingData trainingData = new TrainingData();
            trainingData.ReadTrainFromFile("TrainingData.tr");

            switch (comboBox1.SelectedIndex)
            {
                case 0:
                    neuralNet.TrainOnData(trainingData, 1000, 0, 0.1f);
                    break;
                case 1:
                    neuralNet.TrainOnData(trainingData, 1000, 0, 0.05f);
                    break;
                case 2:
                    neuralNet.TrainOnData(trainingData, 1000, 0, 0.01f);
                    break;
                case 3:
                    neuralNet.TrainOnData(trainingData, 1000, 0, 0.005f);
                    break;
                case 4:
                    neuralNet.TrainOnData(trainingData, 1000, 0, 0.001f);
                    break;
            }

            neuralNet.Save("NeuralNet.ann");

            renewNeural = true;
        }
Example #59
        protected TrainingData GetATrainningData(String linea)
        {
            TrainingData td = new TrainingData();
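            // Each line holds four ';'-separated input numbers followed by one output number.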
            int i;
            for (i = 0; i <= 3; i++)
            {
                try
                {
                    if (!linea.Split(';')[i].Equals(""))
                    {
                        cargarNumeroComoArrayInput(Convert.ToInt32(linea.Split(';')[i]), ref td);
                    }
                }
                catch (FormatException e)
                {
                    Console.WriteLine("Input string is not a sequence of digits.");
                }
                catch (OverflowException e)
                {
                    Console.WriteLine("The number cannot fit in an Int32.");
                }

            }
            cargarNumeroComoArrayOutput(Convert.ToInt32(linea.Split(';')[4]), ref td);
            return td;
        }
Example #60
 int fannProgress(NeuralNet net, TrainingData train, uint maxEpochs, uint epochsBetweenReports, float desiredError, uint epochs)
 {
     report("Training: epoch " + epochs + " of " + maxEpochs);
     return 0;
 }