Code example #1
        public float CompareFeature(RecognitionSample other, TextureFeatures feature)
        {
            // Samples taken from different images cannot be compared.
            if (other.imageFile != imageFile)
            {
                return 0;
            }

            int fragmentsCount = 0;
            int fragmentsMatches = 0;

            foreach (var result in other.sampleResults)
            {
                ++fragmentsCount;
                var mineResult = GetAnswer(result.Region);
                if (mineResult != null)
                {
                    if (mineResult.CompareAnswers(result, feature))
                    {
                        ++fragmentsMatches;
                    }
                }
            }

            // Guard against an empty result set, which would otherwise yield NaN.
            if (fragmentsCount == 0)
            {
                return 0;
            }

            // Fraction of fragments whose answers match for the given feature.
            return fragmentsMatches / (float)fragmentsCount;
        }
Code example #2
        private void btnOpen_Click(object sender, EventArgs e)
        {
            if (openSample.ShowDialog() == System.Windows.Forms.DialogResult.OK)
            {
                sample = new RecognitionSample(TextureRecognition.Instance.Core);
                sample.LoadSample(openSample.FileName);
                pbSample.Image = sample.GetSampleImage();
            }

            // Refresh the texture class list from the current recognition core.
            lbTextureClasses.Items.Clear();
            var core = TextureRecognition.Instance.Core;
            for (int i = 0; i < core.TextureClassCount; ++i)
            {
                lbTextureClasses.Items.Add(core.GetTextureClass(i).Name);
            }
            lbTextureClasses.SelectedIndex = -1;
        }
Code example #3
        public float CompareFeature(RecognitionSample other, TextureFeatures feature)
        {
            // Samples taken from different images cannot be compared.
            if (other.imageFile != imageFile)
            {
                return 0;
            }

            int fragmentsCount = 0;
            int fragmentsMatches = 0;

            foreach (var result in other.sampleResults)
            {
                ++fragmentsCount;
                var mineResult = GetAnswer(result.Region);
                if (mineResult != null)
                {
                    if (mineResult.CompareAnswers(result, feature))
                    {
                        ++fragmentsMatches;
                    }
                }
            }

            // Guard against an empty result set, which would otherwise yield NaN.
            if (fragmentsCount == 0)
            {
                return 0;
            }

            // Fraction of fragments whose answers match for the given feature.
            return fragmentsMatches / (float)fragmentsCount;
        }
Code example #4
        private void tsmiOptions_Click(object sender, EventArgs e)
        {
            var result = RecognitionOptions.Instance.ShowDialog();
            Focus();

            if (result == System.Windows.Forms.DialogResult.OK)
            {
                // Invalidate cached cores, samples and the work image so they are rebuilt with the new options.
                coreC = coreD = coreF = null;
                SampleC = SampleD = SampleF = null;
                workImage = null;

                // Log the change ("Настройки изменены" = "Settings changed") and scroll to the new entry.
                lbOutput.Items.Add("Настройки изменены");
                lbOutput.SelectedIndex = lbOutput.Items.Count - 1;
            }
        }
Code example #5
        private void SelectSample(string file)
        {
            sample = new RecognitionSample(recognition.Core);
            sample.LoadSample(file);
            this.BeginInvoke(new Action(InvokeSelectSample));
            SampleC = SampleD = SampleF = null;
        }
Code example #6
        private void RecognizeThread(object param)
        {
            var id = (int)param;

            RecognitionCore core = null;
            RecognitionSample currentSample = null;
            Bitmap currentImage = null;
            string pref = "";

            // Pick the cached core, sample and log prefix for the requested teaching mode.
            switch (id)
            {
                case 1:
                    // "Полная выборка" = full training set.
                    pref = "    (Полная выборка)";
                    core = coreF;
                    currentSample = SampleF;
                    currentImage = workImage;
                    break;

                case 2:
                    // "Компактная выборка" = compact training set.
                    pref = "    (Компактная выборка)";
                    core = coreC;
                    currentSample = SampleC;
                    currentImage = workImage;
                    break;

                case 3:
                    // "Разряженная выборка" = sparse ("discharged") training set, cf. TeachDischarged below.
                    pref = "    (Разряженная выборка)";
                    core = coreD;
                    currentSample = SampleD;
                    currentImage = workImage;
                    break;

                default:
                    break;
            }

            if (core == null)
            {
                // No cached core for this mode: build one with the same texture classes as the main core.
                core = new RecognitionCore();
                for (int i = 0; i < recognition.Core.TextureClassCount; ++i)
                {
                    var their = recognition.Core.GetTextureClass(i);
                    core.AddTextureClass(their.Name, their.RegionColor);
                }

                for (int i = 0; i < core.TextureClassCount; ++i)
                {
                    var currentClass = core.GetTextureClass(i);

                    // Report progress on the UI thread ("Начато обучение классу" = "Started teaching class").
                    this.BeginInvoke(
                        new Action(delegate()
                        {
                            lbOutput.Items.Add("Начато обучение классу " + currentClass.Name);
                            lbOutput.SelectedIndex = lbOutput.Items.Count - 1;
                        }));

                    // Teach the class with the strategy that matches the requested mode.
                    switch (id)
                    {
                        case 1:
                            currentClass.Teach(teachingSamples[i], GetImagesToTeach(teachingSamples[i]));
                            break;

                        case 2:
                            currentClass.TeachCompact(teachingSamples[i], GetImagesToTeach(teachingSamples[i]));
                            break;

                        case 3:
                            currentClass.TeachDischarged(teachingSamples[i], GetImagesToTeach(teachingSamples[i]));
                            break;

                        default:
                            currentClass.Teach(teachingSamples[i], GetImagesToTeach(teachingSamples[i]));
                            break;
                    }

                    // Teaching runs asynchronously; poll until the class reports it has finished.
                    while (currentClass.IsTeaching)
                    {
                        Thread.Sleep(200);
                    }

                    // Log how many samples were learned for each feature
                    // ("После обучения получено образцов" = "samples obtained after teaching").
                    var result = this.BeginInvoke(
                        new Action(delegate()
                        {
                            var count = currentClass.KnownSamplesNumber(TextureFeatures.GLCM);
                            var part = count / (float)teachingSamples[i].Count;
                            lbOutput.Items.Add("    После обучения получено образцов GLCM: " +
                                count.ToString() + " (" + part.ToString() + ")");

                            count = currentClass.KnownSamplesNumber(TextureFeatures.LBP);
                            part = count / (float)teachingSamples[i].Count;
                            lbOutput.Items.Add("    После обучения получено образцов LBP: " +
                                count.ToString() + " (" + part.ToString() + ")");

                            lbOutput.SelectedIndex = lbOutput.Items.Count - 1;
                        }));
                    // Wait for the UI update before teaching the next class.
                    while (!result.IsCompleted)
                    {
                        Thread.Sleep(50);
                    }
                }
            }

            // Cache the (possibly newly built) core for this mode.
            switch (id)
            {
                case 1:
                    coreF = core;
                    break;
                case 2:
                    coreC = core;
                    break;
                case 3:
                    coreD = core;
                    break;
                default:
                    break;
            }

            if (currentSample == null)
            {
                // "Начато распознавание тестового изображения" = "Recognition of the test image started".
                this.BeginInvoke(
                    new Action(delegate()
                    {
                        lbOutput.Items.Add("Начато распознавание тестового изображения");
                        lbOutput.SelectedIndex = lbOutput.Items.Count - 1;
                    }));

                // Recognize the loaded image with this core and cache the result for the mode.
                currentSample = new RecognitionSample(core, sample.Path);
                currentSample.Recognize();
                currentImage = currentSample.GetSampleImage();
                switch (id)
                {
                    case 1:
                        SampleF = currentSample;
                        workImage = currentImage;
                        break;
                    case 2:
                        SampleC = currentSample;
                        workImage = currentImage;
                        break;
                    case 3:
                        SampleD = currentSample;
                        workImage = currentImage;
                        break;
                    default:
                        break;
                }
            }

            // Show the match ratios ("Соответствие" = "match") for both features and re-enable the UI.
            var iRes = this.BeginInvoke(
                new Action(delegate()
                {
                    lbOutput.Items.Add(currentSample.Path);
                    var result = sample.CompareFeature(currentSample, TextureFeatures.GLCM);
                    lbOutput.Items.Add(pref + " Соответствие GLCM = " + result.ToString());
                    result = sample.CompareFeature(currentSample, TextureFeatures.LBP);
                    lbOutput.Items.Add(pref + " Соответствие LBP = " + result.ToString());
                    lbOutput.SelectedIndex = lbOutput.Items.Count - 1;

                    pbWork.Image = currentImage;

                    msMenu.Enabled = true;
                    btnSelectSample.Enabled = true;
                    btnSelectSamples.Enabled = true;
                    cbTextureClass.Enabled = true;
                    btnCompact.Enabled = true;
                    btnDischarged.Enabled = true;
                    btnFull.Enabled = true;
                }));
            while (!iRes.IsCompleted)
            {
                Thread.Sleep(20);
            }

            // Signal the caller that the background recognition pass has finished.
            complete = true;
        }
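
Note on the threading pattern above: RecognizeThread posts UI updates with Control.BeginInvoke and then busy-waits on IsCompleted with Thread.Sleep. Below is a minimal sketch of an alternative, assuming the same form and lbOutput control as in the examples (the helper name LogOnUiThread is illustrative, not from the original project): Control.EndInvoke blocks the worker thread until the posted delegate has run on the UI thread, so the sleep loops can be dropped.

        // Sketch only, not part of the original project: EndInvoke waits for the delegate
        // posted with BeginInvoke to finish on the UI thread, replacing the polling loops.
        private void LogOnUiThread(string message)
        {
            var asyncResult = this.BeginInvoke(new Action(delegate()
            {
                lbOutput.Items.Add(message);
                lbOutput.SelectedIndex = lbOutput.Items.Count - 1;
            }));

            // Blocks the calling worker thread until the UI thread has processed the delegate.
            this.EndInvoke(asyncResult);
        }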
Code example #7
        private void btnSelectSamples_Click(object sender, EventArgs e)
        {
            if (openImages.ShowDialog() == System.Windows.Forms.DialogResult.OK)
            {
                var index = cbTextureClass.SelectedIndex;
                teachingSamples[index].Clear();
                teachingSamples[index].AddRange(openImages.FileNames);

                // Log the update ("Добавлено N примеров в класс X" = "Added N samples to class X").
                lbOutput.Items.Add("Добавлено " + teachingSamples[index].Count + " примеров в класс " +
                    recognition.Core.GetTextureClass(index).Name);

                // Invalidate cached cores, samples and the work image so they are rebuilt with the new samples.
                coreC = coreD = coreF = null;
                SampleC = SampleD = SampleF = null;
                workImage = null;
            }
        }