/// <summary>
/// Trains a one-vs-all multiclass model: one binary Octave logistic
/// regression classifier per action, where each classifier learns
/// "is class c" (label 1) vs "is any other class" (label 0).
/// </summary>
/// <param name="record">Record whose rows are [class, feature1..featureN].</param>
/// <exception cref="InvalidRecordException">Thrown when the record fails validation.</exception>
public override void Train(EEGRecord record)
        {
            if (!EEGRecordStorage.IsRecordValid(record))
            {
                throw new InvalidRecordException();
            }
            List <double[]> outputInput = record.FeatureVectorsOutputInput;

            binaryClassifiers = new OctaveLogisticRegression[ActionList.Count];
            foreach (var act in ActionList) //action list must contain a consecutive list of integers starting from 1
            {
                int c = act.Value;
                binaryClassifiers[c - 1] = new OctaveLogisticRegression(c.ToString());

                // Build a one-vs-all copy of the data: rows are copied so the
                // caller's record is never mutated, then the label column is
                // remapped to 1 (this class) / 0 (any other class).
                List <double[]> newOutputInput = new List <double[]>();

                foreach (var d in outputInput)
                {
                    double[] p = new double[d.Length];
                    Array.Copy(d, p, d.Length);
                    p[0] = (p[0] == c) ? 1 : 0;
                    newOutputInput.Add(p);
                }

                binaryClassifiers[c - 1].Train(new EEGRecord(newOutputInput));
                if (this.Progress != null)
                {
                    // Multiply before dividing: the original (100 / Count) * c
                    // truncated in integer division, so e.g. with 3 classes the
                    // final report was 99, never 100.
                    this.Progress((100 * c) / ActionList.Count);
                }
            }
        }
// Example #2
        /// <summary>
        /// Console smoke test for the LdaMLP classifier: trains on the stored
        /// "MLPdata" record, then re-classifies the same vectors and prints
        /// per-vector results plus total/correct counts. Note this measures
        /// training-set accuracy only (classifies the data it trained on).
        /// </summary>
        public void Process()
        {
            Console.WriteLine(DbSettings.fullpath);

            EEGRecordStorage s = new EEGRecordStorage();

            EEGRecord r = s.LoadModel("MLPdata");

            LdaMLP model = new LdaMLP();

            Console.WriteLine("Data loaded");

            model.Train(new EEGRecord(r.FeatureVectorsOutputInput));

            int i  = 0; // vectors processed
            int ok = 0; // vectors classified correctly

            foreach (double[] vector in r.FeatureVectorsOutputInput)
            {
                i++;
                // vector[0] is the expected class label; the rest are features.
                double[] input = new double[vector.Length - 1];

                Array.Copy(vector, 1, input, 0, vector.Length - 1);

                int result = model.Classify(input);

                if (result == vector[0])
                {
                    ok++;
                    Console.WriteLine("Result " + result + " Expected " + vector[0] + " OK");
                }
                else
                {
                    Console.WriteLine("Result " + result + " Expected " + vector[0]);
                }
            }

            Console.WriteLine(i);
            Console.WriteLine(ok);
            Console.ReadKey();
        }
        /// <summary>
        /// Initializes the experiments window: builds the WPF visual tree,
        /// then resets UI state to "no record selected, ready to start".
        /// </summary>
        public ExperimentsWindow()
        {
            // Must run first: wires up the XAML-declared controls referenced below.
            InitializeComponent();
            currentRecord = null; // no EEG record selected yet

            buttonStart.IsEnabled = true;

            // Empty queue for tasks scheduled by the window's experiments.
            taskQueue      = new Queue <Task>();
            statusBar.Text = "Ready.";
        }
        // Handler invoked when a record is picked in the record-selection
        // dialog: stores it, updates the UI labels, and pushes the selection
        // into every registered workflow.
        void mrd_ReocordSelected(EEGRecord record)
        {
            currentRecord = record;

            statusBar.Text                 = "Record loaded";
            labelRecordName.Text           = record.Name;
            labelSelectedRecord.Visibility = Visibility.Visible;

            // Keep all workflows in sync with the current selection.
            foreach (var workflow in workflows)
            {
                workflow.SetRecord(currentRecord);
            }
        }
// Example #5
        // Console harness: train LdaSVM on the stored "MLPdata" record, then
        // classify every training vector and report how many came back correct
        // (training-set accuracy only).
        public void Process()
        {
            Console.WriteLine(DbSettings.fullpath);

            EEGRecordStorage storage = new EEGRecordStorage();

            EEGRecord record = storage.LoadModel("MLPdata");

            Console.WriteLine("Data loaded");

            LdaSVM model = new LdaSVM();

            // Single training pass (loop kept for easy repetition experiments).
            for (int pass = 0; pass < 1; pass++)
            {
                model.Train(new EEGRecord(record.FeatureVectorsOutputInput));
            }

            int total   = 0;
            int correct = 0;

            foreach (double[] row in record.FeatureVectorsOutputInput)
            {
                total++;

                // First element is the expected class label; the rest are features.
                double expected   = row[0];
                double[] features = new double[row.Length - 1];

                Array.Copy(row, 1, features, 0, row.Length - 1);

                int predicted = model.Classify(features);

                if (predicted == expected)
                {
                    correct++;
                    Console.WriteLine("Result " + predicted + " Expected " + expected + " OK");
                }
                else
                {
                    Console.WriteLine("Result " + predicted + " Expected " + expected);
                }
            }

            Console.WriteLine(total);
            Console.WriteLine(correct);
            Console.ReadKey();
        }
// Example #6
        /// <summary>
        /// Trains an LDA + neural network model: projects the feature vectors
        /// with Linear Discriminant Analysis, then trains a two-layer sigmoid
        /// network on the projection using backpropagation with early stopping
        /// (training halts when validation error stops improving).
        /// Reports progress via the Progress delegate at 10, 35, per-fold steps,
        /// and 100.
        /// </summary>
        /// <param name="record">Record whose rows are [class, feature1..featureN];
        /// classes must be consecutive integers starting at 1.</param>
        /// <exception cref="InvalidRecordException">Thrown when the record fails validation.</exception>
        public override void Train(EEGRecord record)
        {
            if (!EEGRecordStorage.IsRecordValid(record))
            {
                throw new InvalidRecordException();
            }
            List <double[]> outputInput = record.FeatureVectorsOutputInput;

            // Split the [label, features...] rows into a feature matrix and a label vector.
            double[,] inputs = null;
            int[] outputs = null;
            Converters.Convert(outputInput, ref inputs, ref outputs);

            //output classes must be consecutive: 1,2,3 ...
            _lda = new LinearDiscriminantAnalysis(inputs, outputs);

            if (this.Progress != null)
            {
                this.Progress(10);
            }

            // Compute the analysis
            _lda.Compute();

            if (this.Progress != null)
            {
                this.Progress(35);
            }

            // Project the training data into the discriminant space.
            double[,] projection = _lda.Transform(inputs);

            // convert for NN format (jagged input vectors + one-hot-style outputs)
            double[][] input2  = null;
            double[][] output2 = null;
            Converters.Convert(projection, outputs, ref input2, ref output2);

            // create neural network
            int dimensions   = projection.GetLength(1);
            // Labels are 1..K, so the max label is the number of output neurons.
            int output_count = outputs.Max();

            _network = new ActivationNetwork(
                new SigmoidFunction(2),
                dimensions,    // inputs neurons in the network
                dimensions,    // neurons in the first layer
                output_count); // output neurons

            // create teacher
            BackPropagationLearning teacher = new BackPropagationLearning(_network);

            // 1-in-'ratio' of the data is held out for validation on each split.
            int ratio = 4;
            NNTrainDataIterator iter = new NNTrainDataIterator(ratio, input2, output2);

            //actual training
            while (iter.HasMore) //we do the training each time spliting the data to different 'train' and 'validate' sets
            {
                #region get new data
                double[][] trainDataInput;
                double[][] trainDataOutput;
                double[][] validateDataInput;
                double[][] validateDataOutput;

                iter.NextData(out trainDataInput, out trainDataOutput, out validateDataInput, out validateDataOutput);
                #endregion

                //validationSetError = CalculateError(validateDataInput, validateDataOutput);
                // Sentinels strictly decreasing by 1 so the loop condition below
                // is satisfied on the first two iterations before real errors exist.
                double old_val_error1 = 100002;
                double old_val_error2 = 100001;
                double new_val_error  = 100000;

                //We do the training over the 'train' set until the error of the 'validate' set start to increase.
                //This way we prevent overfitting.
                // Keep training while either of the last two validation-error
                // deltas still shows improvement greater than 0.001.
                int count = 0;
                while (((old_val_error1 - new_val_error) > 0.001) || ((old_val_error2 - old_val_error1) > 0.001))
                {
                    count++;
                    RunEpoch(teacher, trainDataInput, trainDataOutput, true);

                    // Shift the error history window by one epoch.
                    old_val_error2 = old_val_error1;
                    old_val_error1 = new_val_error;

                    new_val_error = CalculateError(validateDataInput, validateDataOutput);
                }

                if (this.Progress != null)
                {
                    // NOTE(review): 65 / ratio is integer division (65/4 = 16), so
                    // per-fold progress steps are 16% and the loop tops out at 99;
                    // the final Progress(100) below compensates — confirm intended.
                    this.Progress(35 + (iter.CurrentIterationIndex) * (65 / ratio));
                }
            }

            //now we have a model of a NN+LDA which we can use for classification
            if (this.Progress != null)
            {
                this.Progress(100);
            }
        }
// Example #7
        /// <summary>
        /// Trains an LDA + multiclass SVM model: projects the feature vectors
        /// with Linear Discriminant Analysis, then fits a multiclass support
        /// vector machine with a linear kernel on the projected data, using
        /// SMO for each underlying binary subproblem.
        /// Reports progress via the Progress delegate at 10, 35 and 100.
        /// </summary>
        /// <param name="record">Record whose rows are [class, feature1..featureN];
        /// classes must be consecutive integers starting at 1.</param>
        /// <exception cref="InvalidRecordException">Thrown when the record fails validation.</exception>
        public override void Train(EEGRecord record)
        {
            if (!EEGRecordStorage.IsRecordValid(record))
            {
                throw new InvalidRecordException();
            }
            List <double[]> outputInput = record.FeatureVectorsOutputInput;

            // Split the [label, features...] rows into a feature matrix and a label vector.
            double[,] inputs = null;
            int[] outputs = null;
            Converters.Convert(outputInput, ref inputs, ref outputs);

            //output classes must be consecutive: 1,2,3 ...
            _lda = new LinearDiscriminantAnalysis(inputs, outputs);

            if (this.Progress != null)
            {
                this.Progress(10);
            }

            // Compute the analysis
            _lda.Compute();

            if (this.Progress != null)
            {
                this.Progress(35);
            }

            // Project the training data into the discriminant space.
            double[,] projection = _lda.Transform(inputs);

            // convert to the jagged-array format the SVM learner expects
            double[][] input2  = null;
            int[]      output2 = null;
            Converters.Convert(projection, outputs, ref input2, ref output2);

            int dimensions   = projection.GetLength(1);
            // Labels are 1..K, so the max label is the number of classes.
            int output_count = outputs.Max();

            // Create a new Linear kernel
            IKernel kernel = new Linear();

            // Create a new Multi-class Support Vector Machine over the
            // projected input space, using the linear kernel.
            _machine = new MulticlassSupportVectorMachine(dimensions, kernel, output_count);

            // Create the Multi-class learning algorithm for the machine
            var teacher = new MulticlassSupportVectorLearning(_machine, input2, output2);

            // Configure the learning algorithm to use SMO to train the
            //  underlying SVMs in each of the binary class subproblems.
            teacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
                                new SequentialMinimalOptimization(svm, classInputs, classOutputs);

            // Run the learning algorithm (the returned training error was
            // previously stored in an unused local; it is discarded here).
            teacher.Run();

            if (this.Progress != null)
            {
                this.Progress(100);
            }
        }
// Example #8
        /// <summary>
        /// Generates the theta parameters for the logistic regression
        /// hypothesis by running an Octave script, then stores them via
        /// hypothesis.SetTheta. Labels must be binary (0 or 1).
        /// Reports progress via the Progress delegate at 5, 20 and 100.
        /// </summary>
        /// <param name="record">Record whose rows are [y, x1..xn] with y in {0, 1}.</param>
        /// <exception cref="InvalidRecordException">Thrown when the record fails validation.</exception>
        /// <exception cref="Exception">Thrown on a non-binary label or when Octave
        /// returns the wrong number of theta values.</exception>
        public override void Train(EEGRecord record)
        {
            if (!EEGRecordStorage.IsRecordValid(record))
            {
                throw new InvalidRecordException();
            }
            List <double[]> outputInput = record.FeatureVectorsOutputInput;

            //1. set data

            //Octave expects y as the LAST column; reverse a COPY of each row so
            //the caller's record is not mutated (the original reversed in place,
            //corrupting the input data for any later use by the caller).
            List <double[]> reversedRows = new List <double[]>(outputInput.Count);
            foreach (double[] raw in outputInput)
            {
                if (raw[0] != 0 && raw[0] != 1)
                {
                    throw new Exception("y must be either 0 or 1");
                }
                double[] copy = new double[raw.Length];
                Array.Copy(raw, copy, raw.Length);
                Array.Reverse(copy);
                reversedRows.Add(copy);
            }
            if (this.Progress != null)
            {
                this.Progress(5);
            }

            string Xyfile = OctaveController.SaveTempFile(reversedRows);

            try
            {
                if (this.Progress != null)
                {
                    this.Progress(20);
                }
                //2. construct script: load [X y], prepend the bias column, and
                //   run gradient-based theta generation.
                string script = "data = load('" + Xyfile + "');\r\n"
                                + "[m, n] = size(data);\r\n"
                                + "X = data(:, [1:n-1]); y = data(:, n);\r\n"
                                + "X = [ones(m, 1) X];\r\n"
                                + "initial_theta = zeros(n, 1);\r\n"
                                + "[theta] = generateTheta(initial_theta,X,y)\r\n";

                OctaveController.NoGUI = true;
                OctaveController.FunctionSearchPath = AdastraConfig.GetBaseOctaveScriptPath() + @"LogisticRegression";
                string result = OctaveController.Execute(script);

                //3. Parse result to extract the theta values: Octave prints them
                //   as blank-line separated numbers; non-numeric chunks are skipped.
                string[] values = result.Split("\n\n".ToCharArray());
                double   d      = 0;

                double[] thetas = (from s in values
                                   where s != string.Empty && double.TryParse(s.Replace("\n", "").Replace(" ", ""), out d)
                                   select d).ToArray();

                // One theta per feature plus the bias term equals the row length.
                if (thetas.Length != outputInput[0].Length)
                {
                    throw new Exception("Octave script returned wrong number of Thetas!");
                }

                hypothesis.SetTheta(thetas);
            }
            finally
            {
                //4. Clear temp files — also on failure, so aborted runs do not
                //   leak temp data files (the original only cleaned up on success).
                if (File.Exists(Xyfile))
                {
                    File.Delete(Xyfile);
                }
            }

            if (this.Progress != null)
            {
                this.Progress(100);
            }
        }