Example #1
        static void Main(string[] args)
        {
            InputLoader loader = new InputLoader();

            loader.LoadFile("digits.csv");
            Label l;
            int i = 0;
            DigitRecognizer recognizer = new DigitRecognizer();
            Dictionary<string, double> parameters = new Dictionary<string, double>()
            {
                { "NumberOfModels", 100 },
                { "MaxNumberOfOperations", 100 },
                { "Width", 28 },
                { "Height", 28 },
            };

            recognizer.ResetModels(parameters);
            // Cycle through the 25,000-element data set indefinitely, skipping index 0.
            while (true)
            {
                i = i % 25000;
                if (i == 0)
                {
                    i++;
                }
                var a = loader.AccessElement(i, out l);
                recognizer.SetContext(a);
                recognizer.SetLabel(l);
                var output = recognizer.Test();
                recognizer.Train();
                i++;
            }
        }
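The InputLoader type itself does not appear in this listing; every example only consumes it. The sketch below reconstructs its apparent surface from the calls used in these examples (LoadFile, AccessElement, AllElements). The element type (a double[] of 28 x 28 pixel values), the CSV layout, and the stand-in Label class are assumptions for illustration, not the project's actual implementation.

// Sketch only: inferred from the usage in these examples, with assumed types and CSV layout.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

// Stand-in for the project's Label type, assumed for this sketch.
public class Label
{
    public Label(string text) { TextRepresentation = text; }
    public string TextRepresentation { get; private set; }
}

public class InputLoader
{
    private readonly List<Tuple<double[], Label>> elements = new List<Tuple<double[], Label>>();

    // Loads labeled digit rows from a CSV such as digits.csv
    // (assumed layout: label in the first column, then 28 x 28 = 784 pixel values).
    public void LoadFile(string path)
    {
        foreach (var line in File.ReadLines(path))
        {
            var parts = line.Split(',');
            int ignored;
            if (!int.TryParse(parts[0], out ignored)) continue; // skip a header row if present
            var pixels = parts.Skip(1).Select(double.Parse).ToArray();
            elements.Add(Tuple.Create(pixels, new Label(parts[0])));
        }
    }

    // Returns the element at `index` and its label through the out parameter (Examples #1 and #4).
    public double[] AccessElement(int index, out Label label)
    {
        label = elements[index].Item2;
        return elements[index].Item1;
    }

    // Enumerates (input, label) pairs; Item1 is the input, Item2 the label (Example #2).
    public IEnumerable<Tuple<double[], Label>> AllElements()
    {
        return elements;
    }
}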
Example #2
        static void Main(string[] args)
        {
            InputLoader loader = new InputLoader();

            loader.LoadFile("digits.csv");
            Stopwatch sw = new Stopwatch();

            var heursiticDetection = new HeuristicDetection(10, 5, quantity: 50, numberOfPoints: 500);
            var hypothesis         = new CurrentHypothesis();

            foreach (var input in loader.AllElements())
            {
                // For every new input we extract n points of interest and build a feature
                // vector that characterizes the spatial relationships between them.
                // Each heuristic contributes its own dictionary of points of interest.
                DetectedPoints v = heursiticDetection.getFeatureVector(input.Item1);

                // Compare this feature vector against each of the other feature vectors we know about.
                sw.Reset();
                sw.Start();
                TestResult r = hypothesis.Predict(v);
                Debug.Print("Prediction: " + sw.Elapsed.Milliseconds.ToString());
                var best = r.BestResult();
                if (best != null && best.Item2 != 0)
                {
                    LogProgress(best.Item1, input.Item2);
                }

                sw.Reset();
                sw.Start();
                hypothesis.Train(v, input.Item2, r);
                Debug.Print("Training: " + sw.Elapsed.Milliseconds.ToString());
                //heursiticDetection.pointsOfInterest.Add(HeuristicDetection.Generate(10, 5, 10));
            }
        }
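The comments in Example #2 describe turning each input's points of interest into a feature vector that captures their spatial relationships. HeuristicDetection and DetectedPoints are not shown in this listing; purely as an illustration of that idea (not the project's actual method), one simple encoding is the sorted set of pairwise distances between detected points:

using System;
using System.Collections.Generic;

// Illustration only: not the project's HeuristicDetection.
static class SpatialFeatures
{
    // Describe a set of detected points by the sorted pairwise distances between them,
    // so similarly arranged points give similar vectors regardless of absolute position.
    public static double[] PairwiseDistanceVector(IList<Tuple<int, int>> points)
    {
        var distances = new List<double>();
        for (int a = 0; a < points.Count; a++)
        {
            for (int b = a + 1; b < points.Count; b++)
            {
                double dx = points[a].Item1 - points[b].Item1;
                double dy = points[a].Item2 - points[b].Item2;
                distances.Add(Math.Sqrt(dx * dx + dy * dy));
            }
        }
        distances.Sort();
        return distances.ToArray();
    }
}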
Example #3
 public MainWindow()
 {
     Logger.Inst.Deserialize(@"..\..\..\Logger\TrialParams.xml");
     InitializeComponent();
     bw = new BackgroundWorker();
     bw.DoWork += bw_DoWork;
     loader = new InputLoader();
     loader.LoadFile(@"C:\Users\Amichai\Data\digits.csv");
     workbench = new Workbench();
     //Listen to all the events here
     this.GridRoot.DataContext = this;
     //workbench.InputLoaded += workbench_InputLoaded;
     workbench.FeaturesTrained += workbench_FeaturesTrained;
     this.TrialParams.Text = Logger.Inst.SerializeParams().ToString();
     this.OutputFile.Text = "Output Filepath: " + Logger.Inst.OutputFilePath();
     Logger.Inst.logFile.SetColumns(
         "TimeStamp", "Number of trials", "Success rate (last 100)", "Feature count",
         "Average Attractiveness", "Average Interestingness", "Average number of points",
         "Average number of data seen", "Max attractiveness", "Max interestingness");
     bw.RunWorkerAsync();
     //this.FeautresList.ItemsSource = this.Features;
 }
Example #4
        static void Main2(string[] args)
        {
            double purgeThreshold = .7;
            InputLoader loader = new InputLoader();

            loader.LoadFile("digits.csv");
            StreamProcessor processor = new StreamProcessor(28, 28);

            //var count = processor.AddContextFeautres();
            //Debug.Print(count.ToString() + " context features added.");
            processor.GenerateRandomFeatures(1150);
            // Rolling window of the last 100 right/wrong predictions.
            LinkedList<bool> rollingRightWrong = new LinkedList<bool>();
            int thresholdIdx = 2;
            int correct = 0;
            int i = 1;

            //for (int i = 1; i < 25000; i++) {
            while (true)
            {
                // Wrap the index so we keep cycling over the 25,000-element data set.
                i = i % 25000;

                //Debug.Print(i.ToString());
                Label l;
                var   a = loader.AccessElement(i, out l);
                processor.SetNextFeautreContext(a, l);
                var output = processor.Predict();
                processor.Train();
                var best = output.BestResult();
                if (best != null && best.Item2 != 0)
                {
                    //Debug.Print(i.ToString() + "  " +
                    //    best.Item1.TextRepresentation + " "
                    //    + best.Item2.ToString());
                    //Debug.Print("Desired: " + processor.DataLabel.TextRepresentation);
                    bool guessedRight = processor.DataLabel.TextRepresentation == best.Item1.TextRepresentation;
                    rollingRightWrong.AddLast(guessedRight);
                    if (guessedRight)
                    {
                        correct++;
                    }
                    // Keep only the last 100 outcomes so `correct` tracks a rolling success rate.
                    if (rollingRightWrong.Count() > 100)
                    {
                        if (rollingRightWrong.First())
                        {
                            correct--;
                        }
                        rollingRightWrong.RemoveFirst();
                    }
                }


                //if(processor.PurgeFeautres(purgeThreshold) > 1000) purgeThreshold+= .01;
                if (i % 400 == 0)
                {
                    Debug.Print("Idx: " + i.ToString() + " " + ((double)correct / 100).ToString());
                    processor.PrintUtil(thresholdIdx);
                    thresholdIdx += 2;
                    //string output2 = processor.DescribeAllFeatures();
                    //Debug.Print(output2);
                }
                i++;
            }
            //Get the ability to quickly serialize good heuristics for the future
        }
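The rollingRightWrong list and the correct counter in Example #4 implement a rolling success rate over the last 100 predictions. A minimal stand-alone sketch of the same bookkeeping (hypothetical helper, not part of the project):

using System.Collections.Generic;

public class RollingAccuracy
{
    private readonly Queue<bool> window = new Queue<bool>();
    private readonly int windowSize;
    private int correct;

    public RollingAccuracy(int windowSize = 100)
    {
        this.windowSize = windowSize;
    }

    // Record one prediction outcome, dropping the oldest once the window is full.
    public void Record(bool guessedRight)
    {
        window.Enqueue(guessedRight);
        if (guessedRight) correct++;
        if (window.Count > windowSize && window.Dequeue())
        {
            correct--;
        }
    }

    // Fraction of correct predictions within the current window.
    public double Rate
    {
        get { return window.Count == 0 ? 0.0 : (double)correct / window.Count; }
    }
}

Recording each guessedRight value through such a helper and printing its Rate every 400 iterations would reproduce the report in the loop above.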