Example #1
        public static void GetSingleContour(Bitmap src, Point point, int id, Bitmap scene, out Bitmap result, out double area)
        {
            string adr = "temp.jpg";

            File.Delete(adr);
            src.Save(adr);
            IplImage image = Cv.LoadImage(adr, LoadMode.AnyColor);
            IplImage gray  = Cv.CreateImage(Cv.GetSize(image), BitDepth.U8, 1);
            IplImage bin   = Cv.CreateImage(Cv.GetSize(image), BitDepth.U8, 1);

            scene.Save(adr);
            IplImage dst = Cv.LoadImage(adr, LoadMode.AnyColor);

            Cv.CvtColor(image, gray, ColorConversion.RgbToGray);
            Cv.InRangeS(gray, 150, 255, bin);
            CvMemStorage    storage  = Cv.CreateMemStorage(0);
            CvSeq <CvPoint> contours = null;
            int             cont     = Cv.FindContours(bin, storage, out contours, CvContour.SizeOf, ContourRetrieval.List, ContourChain.ApproxTC89KCOS, Cv.Point(0, 0));

            contours = Cv.ApproxPoly(contours, CvContour.SizeOf, storage, ApproxPolyMethod.DP, 0, true);
            double temp = 0;

            for (CvSeq <CvPoint> seq0 = contours; seq0 != null; seq0 = seq0.HNext)
            {
                if (Cv.PointPolygonTest(seq0, new CvPoint2D32f(point.X, point.Y), false) > 0 &&
                    Cv.ContourArea(seq0) > 1000 &&
                    Cv.ContourArea(seq0) < (image.Height * image.Width * 0.5))
                {
                    CvMoments moments = new CvMoments();
                    Cv.Moments(seq0, out moments, true);
                    int             xc = (int)(moments.M10 / moments.M00);
                    int             yc = (int)(moments.M01 / moments.M00);
                    CvConnectedComp comp;
                    if (id == 0)
                    {
                        Cv.FloodFill(dst, Cv.Point(point.X, point.Y), Cv.RGB(200, 0, 0), Cv.ScalarAll(10), Cv.ScalarAll(10), out comp, FloodFillFlag.FixedRange, null);
                    }
                    else
                    {
                        Cv.FloodFill(dst, Cv.Point(point.X, point.Y), Cv.RGB(0, 150, 50), Cv.ScalarAll(10), Cv.ScalarAll(10), out comp, FloodFillFlag.FixedRange, null);
                    }
                    dst.PutText(
                        id.ToString(),
                        Cv.Point(xc, yc),
                        new CvFont(FontFace.HersheySimplex, 2, 2, 1, 5, LineType.Link8),
                        CvColor.Black);
                    temp = Cv.ContourArea(seq0);
                }
            }
            result = dst.ToBitmap();
            area   = temp;
        }
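A minimal caller for GetSingleContour might look like the sketch below; the file names and click coordinates are placeholders rather than part of the original example, and the usual System.Drawing/OpenCvSharp usings are assumed.

        public static void RunSingleContourDemo()
        {
            // Hypothetical inputs: any source/scene bitmaps and a point inside the contour of interest.
            Bitmap sourceBitmap = new Bitmap("objects.png");
            Bitmap sceneBitmap  = new Bitmap("scene.png");
            Point  clickedPoint = new Point(120, 80);

            Bitmap highlighted;
            double contourArea;
            GetSingleContour(sourceBitmap, clickedPoint, 0, sceneBitmap, out highlighted, out contourArea);

            Console.WriteLine("Selected contour area: {0:F1} px^2", contourArea);
            highlighted.Save("highlighted.png");
        }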
Example #2
        public Moments()
        {
            // (1) Load the image. For a 3-channel image, the COI (channel of interest) must be set.
            using (IplImage srcImg = new IplImage(Const.ImageLenna, LoadMode.AnyColor | LoadMode.AnyDepth))
            {
                if (srcImg.NChannels == 3 && srcImg.COI == 0)
                {
                    srcImg.COI = 1;
                }
                // (2) Compute the image moments of the input image up to the 3rd order.
                CvMoments moments = new CvMoments(srcImg, false);
                srcImg.COI = 0;

                // (3) Compute the moments and Hu moment invariants from the values of the obtained CvMoments structure.
                double      spatialMoment = moments.GetSpatialMoment(0, 0);
                double      centralMoment = moments.GetCentralMoment(0, 0);
                double      normCMoment   = moments.GetNormalizedCentralMoment(0, 0);
                CvHuMoments huMoments     = new CvHuMoments(moments);

                // (4) Draw the obtained moments and Hu moment invariants onto the image as text.
                using (CvFont font = new CvFont(FontFace.HersheySimplex, 1.0, 1.0, 0, 2, LineType.Link8))
                {
                    string[] text = new string[10];
                    text[0] = string.Format("spatial={0:F3}", spatialMoment);
                    text[1] = string.Format("central={0:F3}", centralMoment);
                    text[2] = string.Format("norm={0:F3}", normCMoment);
                    text[3] = string.Format("hu1={0:F10}", huMoments.Hu1);
                    text[4] = string.Format("hu2={0:F10}", huMoments.Hu2);
                    text[5] = string.Format("hu3={0:F10}", huMoments.Hu3);
                    text[6] = string.Format("hu4={0:F10}", huMoments.Hu4);
                    text[7] = string.Format("hu5={0:F10}", huMoments.Hu5);
                    text[8] = string.Format("hu6={0:F10}", huMoments.Hu6);
                    text[9] = string.Format("hu7={0:F10}", huMoments.Hu7);

                    CvSize textSize = font.GetTextSize(text[0]);
                    for (int i = 0; i < 10; i++)
                    {
                        srcImg.PutText(text[i], new CvPoint(10, (textSize.Height + 3) * (i + 1)), font, CvColor.Black);
                    }
                }

                // (5) Display the input image with the moment results; exit when a key is pressed.
                using (CvWindow window = new CvWindow("Image", WindowMode.AutoSize))
                {
                    window.ShowImage(srcImg);
                    Cv.WaitKey(0);
                }
            }
        }
Example #3
        public Moments()
        {
            using (IplImage srcImg = new IplImage(FilePath.Image.Lenna, LoadMode.AnyColor | LoadMode.AnyDepth))
            {
                if (srcImg.NChannels == 3 && srcImg.COI == 0)
                {
                    srcImg.COI = 1;
                }

                CvMoments moments = new CvMoments(srcImg, false);
                srcImg.COI = 0;

                double      spatialMoment = moments.GetSpatialMoment(0, 0);
                double      centralMoment = moments.GetCentralMoment(0, 0);
                double      normCMoment   = moments.GetNormalizedCentralMoment(0, 0);
                CvHuMoments huMoments     = new CvHuMoments(moments);

                // drawing
                using (CvFont font = new CvFont(FontFace.HersheySimplex, 1.0, 1.0, 0, 2, LineType.Link8))
                {
                    string[] text = new string[10];
                    text[0] = string.Format("spatial={0:F3}", spatialMoment);
                    text[1] = string.Format("central={0:F3}", centralMoment);
                    text[2] = string.Format("norm={0:F3}", normCMoment);
                    text[3] = string.Format("hu1={0:F10}", huMoments.Hu1);
                    text[4] = string.Format("hu2={0:F10}", huMoments.Hu2);
                    text[5] = string.Format("hu3={0:F10}", huMoments.Hu3);
                    text[6] = string.Format("hu4={0:F10}", huMoments.Hu4);
                    text[7] = string.Format("hu5={0:F10}", huMoments.Hu5);
                    text[8] = string.Format("hu6={0:F10}", huMoments.Hu6);
                    text[9] = string.Format("hu7={0:F10}", huMoments.Hu7);

                    CvSize textSize = font.GetTextSize(text[0]);
                    for (int i = 0; i < 10; i++)
                    {
                        srcImg.PutText(text[i], new CvPoint(10, (textSize.Height + 3) * (i + 1)), font, CvColor.Black);
                    }
                }

                using (var window = new CvWindow("Image", WindowMode.AutoSize))
                {
                    window.ShowImage(srcImg);
                    Cv.WaitKey(0);
                }
            }
        }
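Examples #2 and #3 only print the Hu invariants. As a rough sketch (not part of the original samples) of how they could be used, the log-scaled absolute differences of the seven invariants give a simple, scale- and rotation-tolerant shape distance; the file names below are placeholders.

        public static double HuDistance(string fileA, string fileB)
        {
            using (IplImage a = new IplImage(fileA, LoadMode.GrayScale))
            using (IplImage b = new IplImage(fileB, LoadMode.GrayScale))
            {
                // Treat the images as binary masks when computing the moments.
                CvHuMoments huA = new CvHuMoments(new CvMoments(a, true));
                CvHuMoments huB = new CvHuMoments(new CvMoments(b, true));

                double[] hA = { huA.Hu1, huA.Hu2, huA.Hu3, huA.Hu4, huA.Hu5, huA.Hu6, huA.Hu7 };
                double[] hB = { huB.Hu1, huB.Hu2, huB.Hu3, huB.Hu4, huB.Hu5, huB.Hu6, huB.Hu7 };

                double dist = 0;
                for (int i = 0; i < 7; i++)
                {
                    // Log scaling keeps the very different magnitudes of Hu1..Hu7 comparable.
                    double la = Math.Sign(hA[i]) * Math.Log10(Math.Abs(hA[i]) + 1e-30);
                    double lb = Math.Sign(hB[i]) * Math.Log10(Math.Abs(hB[i]) + 1e-30);
                    dist += Math.Abs(la - lb);
                }
                return dist;
            }
        }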
Example #4
        public static void GetAllObjects(Bitmap src, Point point, out Bitmap result, out ComplexObject etalon, out List <ComplexObject> allcontours)
        {
            allcontours = new List <ComplexObject>();
            etalon      = new ComplexObject();
            string adr = "temp.jpg";

            if (File.Exists(adr))
            {
                File.Delete(adr);
            }
            src.Save(adr);
            IplImage image = Cv.LoadImage(adr, LoadMode.AnyColor);

            if (File.Exists(adr))
            {
                File.Delete(adr);
            }
            IplImage gray = Cv.CreateImage(Cv.GetSize(image), BitDepth.U8, 1);
            IplImage bin  = Cv.CreateImage(Cv.GetSize(image), BitDepth.U8, 1);
            IplImage dst  = Cv.CloneImage(image);

            Cv.CvtColor(image, gray, ColorConversion.RgbToGray);
            Cv.InRangeS(gray, 150, 255, bin);
            CvMemStorage    storage  = Cv.CreateMemStorage(0);
            CvSeq <CvPoint> contours = null;
            int             cont     = Cv.FindContours(bin, storage, out contours, CvContour.SizeOf, ContourRetrieval.List, ContourChain.ApproxTC89KCOS, Cv.Point(0, 0));

            contours = Cv.ApproxPoly(contours, CvContour.SizeOf, storage, ApproxPolyMethod.DP, 0, true);
            int id = 1;

            for (CvSeq <CvPoint> seq0 = contours; seq0 != null; seq0 = seq0.HNext)
            {
                if (Cv.ContourArea(seq0) > 1000 && Cv.ContourArea(seq0) < (image.Height * image.Width * 0.5))
                {
                    CvMoments moments = new CvMoments();
                    Cv.Moments(seq0, out moments, true);
                    double xc       = (moments.M10 / moments.M00);
                    double yc       = (moments.M01 / moments.M00);
                    double distance = Math.Sqrt(xc * xc + yc * yc);
                    if (Cv.PointPolygonTest(seq0, new CvPoint2D32f(point.X, point.Y), false) > 0)
                    {
                        etalon = new ComplexObject(seq0, new CvPoint2D32f(xc, yc), true, 0, Cv.ContourArea(seq0), distance);
                        Cv.DrawContours(dst, seq0, Cv.RGB(250, 0, 0), Cv.RGB(50, 250, 0), 0, -1, LineType.Link8);
                        dst.PutText(
                            "0",
                            Cv.Point((int)xc, (int)yc),
                            new CvFont(FontFace.HersheySimplex, 2, 2, 1, 5, LineType.Link8),
                            CvColor.Black);
                    }
                    else
                    {
                        allcontours.Add(new ComplexObject(seq0, new CvPoint2D32f(xc, yc), false, id, Cv.ContourArea(seq0), distance));
                        id++;
                    }
                }
            }
            allcontours.Sort(delegate(ComplexObject ob1, ComplexObject ob2)
            {
                return(ob1.Distance.CompareTo(ob2.Distance));
            });
            for (int i = 0; i < allcontours.Count; i++)
            {
                allcontours[i].Id = i + 1;
                Cv.DrawContours(dst, allcontours[i].Cont, Cv.RGB(0, 150, 50), Cv.RGB(50, 250, 0), 0, -1, LineType.Link8);
                dst.PutText(
                    allcontours[i].Id.ToString(),
                    allcontours[i].Center,
                    new CvFont(FontFace.HersheySimplex, 2, 2, 1, 5, LineType.Link8),
                    CvColor.Black);
            }
            allcontours.Sort(delegate(ComplexObject ob1, ComplexObject ob2)
            {
                return(ob1.Id.CompareTo(ob2.Id));
            });
            result = dst.ToBitmap();
        }
Example #5
        public static void GetAllObjects(Bitmap src, out Bitmap result, out List <BloodObjects> allObjects, string adrWEB)
        {
            allObjects = new List <BloodObjects>();
            string adr = string.Empty;

            try
            {
                adr = "temp.jpg";
                File.Delete(adr);
                src.Save(adr);
            }
            catch (System.Runtime.InteropServices.ExternalException)
            {
                adr = adrWEB;
                File.Delete(adr);
                src.Save(adr);
            }
            IplImage image = Cv.LoadImage(adr, LoadMode.AnyColor);
            IplImage gray  = Cv.CreateImage(Cv.GetSize(image), BitDepth.U8, 1);
            IplImage bin   = Cv.CreateImage(Cv.GetSize(image), BitDepth.U8, 1);
            IplImage dst   = Cv.CloneImage(image);

            Cv.CvtColor(image, gray, ColorConversion.RgbToGray);
            Cv.InRangeS(gray, 150, 255, bin);
            CvMemStorage    storage  = Cv.CreateMemStorage(0);
            CvSeq <CvPoint> contours = null;
            int             cont     = Cv.FindContours(bin, storage, out contours, CvContour.SizeOf, ContourRetrieval.List, ContourChain.ApproxTC89KCOS, Cv.Point(0, 0));

            contours = Cv.ApproxPoly(contours, CvContour.SizeOf, storage, ApproxPolyMethod.DP, 0, true);
            int id = 0;

            for (CvSeq <CvPoint> seq0 = contours; seq0 != null; seq0 = seq0.HNext)
            {
                if (Cv.ContourArea(seq0) > 100 && Cv.ContourArea(seq0) < (image.Height * image.Width * 0.5))
                {
                    CvMoments moments = new CvMoments();
                    Cv.Moments(seq0, out moments, true);
                    double xc       = (moments.M10 / moments.M00);
                    double yc       = (moments.M01 / moments.M00);
                    double distance = Math.Sqrt(xc * xc + yc * yc);
                    allObjects.Add(new BloodObjects(
                                       id,
                                       seq0,
                                       seq0.ToList(),
                                       Cv.ContourArea(seq0),
                                       new CvPoint2D32f(xc, yc),
                                       distance,
                                       Math.Pow(Cv.ContourPerimeter(seq0), 2) / Cv.ContourArea(seq0),
                                       Cv.ContourPerimeter(seq0),
                                       Group.Interest));
                    id++;
                }
            }
            allObjects = Classified(allObjects);
            allObjects.Sort(delegate(BloodObjects ob1, BloodObjects ob2)
            {
                return(ob1.Distance.CompareTo(ob2.Distance));
            });
            for (int i = 0; i < allObjects.Count; i++)
            {
                allObjects[i].Id = i;
                if (allObjects[i].Group == Group.Interest)
                {
                    Cv.DrawContours(dst, allObjects[i].Contour, Cv.RGB(0, 250, 0), Cv.RGB(50, 250, 0), 0, -1, LineType.Link8);
                }
                else if (allObjects[i].Group == Group.Small)
                {
                    Cv.DrawContours(dst, allObjects[i].Contour, Cv.RGB(0, 0, 250), Cv.RGB(50, 250, 0), 0, -1, LineType.Link8);
                }
                else
                {
                    Cv.DrawContours(dst, allObjects[i].Contour, Cv.RGB(250, 0, 0), Cv.RGB(50, 250, 0), 0, -1, LineType.Link8);
                }
                dst.PutText(
                    allObjects[i].Id.ToString(),
                    allObjects[i].Center,
                    new CvFont(FontFace.HersheySimplex, 0.4, 0.4, 0.5, 1, LineType.Link8),
                    CvColor.Black);
            }
            allObjects.Sort(delegate(BloodObjects ob1, BloodObjects ob2)
            {
                return(ob1.Id.CompareTo(ob2.Id));
            });
            result = dst.ToBitmap();
        }
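Example #5 feeds Math.Pow(Cv.ContourPerimeter(seq0), 2) / Cv.ContourArea(seq0) into BloodObjects, i.e. a compactness measure. A small helper for the normalized form (4πA/P², 1.0 for an ideal circle) is sketched below; it uses only calls that already appear in the examples, and the method name is an invented placeholder.

        // Sketch: normalized circularity of a contour; approaches 1.0 for an ideal circle.
        public static double Circularity(CvSeq<CvPoint> seq)
        {
            double area      = Cv.ContourArea(seq);
            double perimeter = Cv.ContourPerimeter(seq);
            if (perimeter <= 0)
            {
                return 0;
            }
            return 4 * Math.PI * area / (perimeter * perimeter);
        }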
Example #6
        private void task()
        {
            Camera         camera              = Camera.GetInstance();
            MotorControler mc                  = MotorControler.GetInstance(parameterManager);
            Vector3        CurrentPoint        = mc.GetPoint();
            Vector3        p                   = new Vector3();
            int            BinarizeThreshold   = 10;
            int            BrightnessThreshold = 7;
            Mat            sum                 = Mat.Zeros(440, 512, MatType.CV_8UC1);

            string       datfileName = string.Format(@"c:\img\{0}.dat", System.DateTime.Now.ToString("yyyyMMdd_HHmmss_fff"));
            BinaryWriter writer      = new BinaryWriter(File.Open(datfileName, FileMode.Create));

            for (int i = 0; i < 10; i++)
            {
                byte[] b = camera.ArrayImage;
                writer.Write(b);
                p = mc.GetPoint();
                Mat mat = new Mat(440, 512, MatType.CV_8U, b);
                mat.ImWrite(String.Format(@"c:\img\{0}_{1}_{2}_{3}.bmp",
                                          System.DateTime.Now.ToString("yyyyMMdd_HHmmss_fff"),
                                          (int)(p.X * 1000),
                                          (int)(p.Y * 1000),
                                          (int)(p.Z * 1000)));
                Cv2.GaussianBlur(mat, mat, Cv.Size(3, 3), -1);
                Mat gau = mat.Clone();
                Cv2.GaussianBlur(gau, gau, Cv.Size(31, 31), -1);
                Cv2.Subtract(gau, mat, mat);
                Cv2.Threshold(mat, mat, BinarizeThreshold, 1, ThresholdType.Binary);
                Cv2.Add(sum, mat, sum);
                mc.MoveDistance(-0.003, VectorId.Z);
                mc.Join();
            }

            Cv2.Threshold(sum, sum, BrightnessThreshold, 1, ThresholdType.Binary);

            //Using Cv2.FindContours causes an AccessViolationException (in both Release and Debug builds), so this is written in C-API style
            using (CvMemStorage storage = new CvMemStorage())
            {
                using (CvContourScanner scanner = new CvContourScanner(sum.ToIplImage(), storage, CvContour.SizeOf, ContourRetrieval.Tree, ContourChain.ApproxSimple))
                {
                    //string fileName = string.Format(@"c:\img\{0}.txt",
                    //        System.DateTime.Now.ToString("yyyyMMdd_HHmmss_fff"));
                    string fileName = string.Format(@"c:\img\u.txt");

                    foreach (CvSeq <CvPoint> c in scanner)
                    {
                        CvMoments mom = new CvMoments(c, false);
                        if (c.ElemSize < 2)
                        {
                            continue;
                        }
                        if (mom.M00 == 0.0)
                        {
                            continue;
                        }
                        double mx = mom.M10 / mom.M00;
                        double my = mom.M01 / mom.M00;
                        File.AppendAllText(fileName, string.Format("{0:F} {1:F}\n", mx, my));
                    }
                }
            }

            sum *= 255;
            sum.ImWrite(String.Format(@"c:\img\{0}_{1}_{2}.bmp",
                                      System.DateTime.Now.ToString("yyyyMMdd_HHmmss_fff"),
                                      (int)(p.X * 1000),
                                      (int)(p.Y * 1000)));


            Vector2 encoderPoint = new Vector2(-1, -1);

            encoderPoint.X = mc.GetPoint().X;
            encoderPoint.Y = mc.GetPoint().Y;// had to do it this way after being told to; Yoshida 20150427
            Vector2 viewerPoint = new Vector2(-1, -1);

            if (TigerPatternMatch.PatternMatch(ref viewerPoint))
            {
                encoderPoint = coordManager.TransToEmulsionCoord(viewerPoint);
                mc.MovePointXY(encoderPoint);
                mc.Join();
            }
        }
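Examples #6, #8 and #9 repeat the same preprocessing: a small Gaussian blur, a large Gaussian blur, a subtraction (difference of Gaussians) and a binary threshold. A hedged refactoring of that pattern into one helper could look like the sketch below; the method name and parameter defaults are assumptions, and only Cv2 calls already used above appear.

        // Sketch: difference-of-Gaussians binarization, following the steps used in task()/BeamDetection.
        private static Mat DogBinarize(Mat src, int smallKernel = 3, int largeKernel = 31, int threshold = 10)
        {
            Mat fine   = src.Clone();
            Mat coarse = src.Clone();
            Cv2.GaussianBlur(fine, fine, Cv.Size(smallKernel, smallKernel), -1);
            Cv2.GaussianBlur(coarse, coarse, Cv.Size(largeKernel, largeKernel), -1);

            Mat diff = new Mat();
            Cv2.Subtract(coarse, fine, diff);                        // bright grains on a dark background
            Cv2.Threshold(diff, diff, threshold, 1, ThresholdType.Binary);
            return diff;
        }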
Example #7
        static OpenCvSharp.CPlusPlus.Point TrackDetection(List <Mat> mats, int px, int py, int shiftx = 2, int shifty = 2, int shiftpitch = 4, int windowsize = 40, int phthresh = 5, bool debugflag = false)
        {
            int x0 = px - 256;
            int y0 = py - 220;

            List <rawmicrotrack> rms = new List <rawmicrotrack>();

            // Point2d pixel_cen = TrackDetection(binimages, 256, 220, 3, 3, 4, 90, 3);


            int counter = 0;

            for (int ax = -shiftx; ax <= shiftx; ax++)
            {
                for (int ay = -shifty; ay <= shifty; ay++)
                {
                    using (Mat big = Mat.Zeros(600, 600, MatType.CV_8UC1))
                        using (Mat imgMask = Mat.Zeros(big.Height, big.Width, MatType.CV_8UC1))
                        {
                            //make the size of mask
                            int ystart = big.Height / 2 + y0 - windowsize / 2;
                            int yend   = big.Height / 2 + y0 + windowsize / 2;
                            int xstart = big.Width / 2 + x0 - windowsize / 2;
                            int xend   = big.Width / 2 + x0 + windowsize / 2;

                            //make mask as shape of rectangle. by use of opencv
                            OpenCvSharp.CPlusPlus.Rect recMask = new OpenCvSharp.CPlusPlus.Rect(xstart, ystart, windowsize, windowsize);
                            Cv2.Rectangle(imgMask, recMask, 255, -1);// filled rectangle, brightness = 255

                            for (int p = 0; p < mats.Count; p++)
                            {
                                int startx = big.Width / 2 - mats[p].Width / 2 + (int)(p * ax * shiftpitch / 8.0);
                                int starty = big.Height / 2 - mats[p].Height / 2 + (int)(p * ay * shiftpitch / 8.0);
                                Cv2.Add(
                                    big[starty, starty + mats[p].Height, startx, startx + mats[p].Width],
                                    mats[p],
                                    big[starty, starty + mats[p].Height, startx, startx + mats[p].Width]);
                            }

                            using (Mat big_c = big.Clone())
                            {
                                Cv2.Threshold(big, big, phthresh, 255, ThresholdType.ToZero);
                                Cv2.BitwiseAnd(big, imgMask, big);

                                //Mat roi = big[ystart, yend , xstart, xend];// the memory region is not contiguous, so contour extraction throws an exception

                                if (debugflag == true)
                                {//
                                    //bigorg.ImWrite(String.Format(@"{0}_{1}_{2}.png",counter,ax,ay));
                                    //Mat roiwrite = roi.Clone() * 30;
                                    //roiwrite.ImWrite(String.Format(@"roi_{0}_{1}_{2}.png", counter, ax, ay));
                                    Cv2.Rectangle(big_c, recMask, 255, 1);// rectangle outline, brightness = 255
                                    Cv2.ImShow("big_cx30", big_c * 30);
                                    Cv2.ImShow("bigx30", big * 30);
                                    //Cv2.ImShow("imgMask", imgMask);
                                    //Cv2.ImShow("roi", roi * 30);
                                    Cv2.WaitKey(0);
                                }
                            }//using big_c

                            using (CvMemStorage storage = new CvMemStorage())
                                using (CvContourScanner scanner = new CvContourScanner(big.ToIplImage(), storage, CvContour.SizeOf, ContourRetrieval.Tree, ContourChain.ApproxSimple))
                                {
                                    foreach (CvSeq <CvPoint> c in scanner)
                                    {
                                        CvMoments mom = new CvMoments(c, false);
                                        if (c.ElemSize < 2)
                                        {
                                            continue;
                                        }
                                        if (mom.M00 < 1.0)
                                        {
                                            continue;
                                        }
                                        double        mx = mom.M10 / mom.M00;
                                        double        my = mom.M01 / mom.M00;
                                        rawmicrotrack rm = new rawmicrotrack();
                                        rm.ax = ax;
                                        rm.ay = ay;
                                        rm.cx = (int)(mx - big.Width / 2);
                                        rm.cy = (int)(my - big.Height / 2);
                                        rm.pv = (int)(mom.M00);
                                        rms.Add(rm);
                                        //Console.WriteLine(string.Format("{0}   {1} {2}   {3} {4}", rm.pv, ax, ay, rm.cx, rm.cy ));
                                    }
                                }//using contour

                            //big_c.Dispose();

                            counter++;
                        }//using Mat
                }//ay
            }//ax



            OpenCvSharp.CPlusPlus.Point trackpos = new OpenCvSharp.CPlusPlus.Point(0, 0);
            if (rms.Count > 0)
            {
                rawmicrotrack rm     = new rawmicrotrack();
                double        meancx = 0;
                double        meancy = 0;
                double        meanax = 0;
                double        meanay = 0;
                double        meanph = 0;
                double        meanpv = 0;
                double        sumpv  = 0;

                for (int i = 0; i < rms.Count; i++)
                {
                    meanpv += rms[i].pv * rms[i].pv;
                    meancx += rms[i].cx * rms[i].pv;
                    meancy += rms[i].cy * rms[i].pv;
                    meanax += rms[i].ax * rms[i].pv;
                    meanay += rms[i].ay * rms[i].pv;
                    sumpv  += rms[i].pv;
                }

                meancx /= sumpv;// weighted average of the centroid and slope, weighted by brightness
                meancy /= sumpv;
                meanax /= sumpv;
                meanay /= sumpv;
                meanpv /= sumpv;

                trackpos = new OpenCvSharp.CPlusPlus.Point(
                    (int)(meancx) + 256 - meanax * shiftpitch,
                    (int)(meancy) + 220 - meanay * shiftpitch
                    );

                double anglex = (meanax * shiftpitch * 0.267) / (3.0 * 7.0 * 2.2);
                double angley = (meanay * shiftpitch * 0.267) / (3.0 * 7.0 * 2.2);
                Console.WriteLine(string.Format("{0:f4} {1:f4}", anglex, angley));
            }
            else
            {
                trackpos = new OpenCvSharp.CPlusPlus.Point(-1, -1);
            }


            return(trackpos);
        }//track detection
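The averaging step above weights the candidate positions and slopes by their brightness (pv). The same brightness-weighted mean could be written as a small helper, sketched below with an invented name.

        // Sketch: brightness-weighted mean, as used for meancx/meancy/meanax/meanay above.
        private static double WeightedMean(IList<double> values, IList<double> weights)
        {
            double sum = 0, weightSum = 0;
            for (int i = 0; i < values.Count; i++)
            {
                sum       += values[i] * weights[i];
                weightSum += weights[i];
            }
            return weightSum > 0 ? sum / weightSum : 0;
        }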
Example #8
        private void task()
        {
            TracksManager  tm                = parameterManager.TracksManager;
            Track          myTrack           = tm.GetTrack(tm.TrackingIndex);
            MotorControler mc                = MotorControler.GetInstance(parameterManager);
            Camera         camera            = Camera.GetInstance();
            List <Mat>     image_set         = new List <Mat>();
            List <Mat>     image_set_reverse = new List <Mat>();

            Surface surface   = Surface.GetInstance(parameterManager);// get boundary values from surface recognition
            double  uptop     = surface.UpTop;
            double  upbottom  = surface.UpBottom;
            double  lowtop    = surface.LowTop;
            double  lowbottom = surface.LowBottom;

            double now_x = mc.GetPoint().X;
            double now_y = mc.GetPoint().Y;
            double now_z = mc.GetPoint().Z;


            common_dx = myTrack.MsDX + ((0.265625 * over_dx * 3) / (0.024 * 2.2 * 1000));
            common_dy = myTrack.MsDY - ((0.265625 * over_dy * 3) / (0.024 * 2.2 * 1000));


            for (int i = 0; i < 8; i++)
            {                                                        // myTrack.MsD○ is the displacement of d○ in mm per 1 mm of dz
                double next_x = now_x - i * common_dx * 0.003 * 2.2; // images taken at 3 μm intervals
                double next_y = now_y - i * common_dy * 0.003 * 2.2; // shrinkage factor assumed to be 2.2 (tentative)
                mc.MovePoint(next_x, next_y, now_z - 0.003 * i);
                mc.Join();

                byte[] b      = camera.ArrayImage;
                Mat    image  = new Mat(440, 512, MatType.CV_8U, b);
                Mat    imagec = image.Clone();
                image_set.Add(imagec);
            }

            for (int i = 7; i >= 0; i--)
            {
                image_set_reverse.Add(image_set[i]);
            }

            int n = image_set.Count();// number of images captured in one pass

            Mat cont  = new Mat(440, 512, MatType.CV_8U);
            Mat gau_1 = new Mat(440, 512, MatType.CV_8U);
            Mat gau_2 = new Mat(440, 512, MatType.CV_8U);
            Mat sub   = new Mat(440, 512, MatType.CV_8U);
            Mat bin   = new Mat(440, 512, MatType.CV_8U);

            double Max_kido;
            double Min_kido;

            OpenCvSharp.CPlusPlus.Point maxloc;
            OpenCvSharp.CPlusPlus.Point minloc;

            List <Mat> two_set  = new List <Mat>();
            List <Mat> Part_img = new List <Mat>();

            for (int i = 0; i < image_set.Count(); i++)
            {
                Cv2.GaussianBlur((Mat)image_set_reverse[i], gau_1, Cv.Size(3, 3), -1); // parameter needs to be reviewed
                Cv2.GaussianBlur(gau_1, gau_2, Cv.Size(51, 51), -1);                   // parameter needs to be reviewed
                Cv2.Subtract(gau_2, gau_1, sub);
                Cv2.MinMaxLoc(sub, out Min_kido, out Max_kido, out minloc, out maxloc);
                cont = (sub - Min_kido) * 255 / (Max_kido - Min_kido);
                cont.ImWrite(string.Format(@"C:\set\cont_{0}.bmp", i));
                Cv2.Threshold(cont, bin, 115, 1, ThresholdType.Binary);// parameter needs to be reviewed
                two_set.Add(bin);
            }

            List <mm> white_area = new List <mm>();
            int       x0         = 256;
            int       y0         = 220;// center of the field of view


            for (int delta_xx = -1; delta_xx <= 1; delta_xx++)// how far to shift relative to the bottom image
            {
                for (int delta_yy = -1; delta_yy <= 1; delta_yy++)
                {
                    {
                        //    // Build the template for the stacked image (filled with zeros)
                        //    Mat superimposed = Mat.Zeros(440 + (n - 1) * Math.Abs(delta_yy), 512 + (n - 1) * Math.Abs(delta_xx), MatType.CV_8UC1);
                        //
                        //    // Build a template for each image
                        //    for (int i = 0; i < two_set.Count; i++) {
                        //        Mat Part = Mat.Zeros(440 + (n - 1) * Math.Abs(delta_yy), 512 + (n - 1) * Math.Abs(delta_xx), MatType.CV_8UC1);
                        //        Part_img.Add(Part);
                        //    }

                        // Build the template for the stacked image (filled with zeros)
                        Mat superimposed = Mat.Zeros(440 + 3 * Math.Abs(delta_yy), 512 + 3 * Math.Abs(delta_xx), MatType.CV_8UC1);


                        // Build a template for each image
                        for (int i = 0; i < two_set.Count; i++)
                        {
                            Mat Part = Mat.Zeros(440 + 3 * Math.Abs(delta_yy), 512 + 3 * Math.Abs(delta_xx), MatType.CV_8UC1);
                            Part_img.Add(Part);
                        }// for the case of shifting two images as one set



                        if (delta_xx >= 0 && delta_yy >= 0)// shift toward the lower right of the image
                        {
                            for (int i = 0; i < two_set.Count; i++)
                            {
                                if (i == 0 || i == 1)
                                {
                                    Part_img[i][
                                        0
                                        , 440
                                        , 0
                                        , 512
                                    ] = two_set[i];     // put the processed image into the corresponding part of Part
                                }
                                else if (i == 2 || i == 3)
                                {
                                    Part_img[i][
                                        0 + Math.Abs(delta_yy)     // y start
                                        , 440 + Math.Abs(delta_yy) // y end
                                        , 0 + Math.Abs(delta_xx)   // x start
                                        , 512 + Math.Abs(delta_xx) // x end
                                    ] = two_set[i];                // put the processed image into the corresponding part of Part
                                }
                                else if (i == 4 || i == 5)
                                {
                                    Part_img[i][
                                        0 + 2 * Math.Abs(delta_yy)     // y start
                                        , 440 + 2 * Math.Abs(delta_yy) // y end
                                        , 0 + 2 * Math.Abs(delta_xx)   // x start
                                        , 512 + 2 * Math.Abs(delta_xx) // x end
                                    ] = two_set[i];                    // put the processed image into the corresponding part of Part
                                }
                                else if (i == 6 || i == 7)
                                {
                                    Part_img[i][
                                        0 + 3 * Math.Abs(delta_yy)     // y start
                                        , 440 + 3 * Math.Abs(delta_yy) // y end
                                        , 0 + 3 * Math.Abs(delta_xx)   // x start
                                        , 512 + 3 * Math.Abs(delta_xx) // x end
                                    ] = two_set[i];                    // put the processed image into the corresponding part of Part
                                }
                            }
                            for (int i = 0; i < Part_img.Count(); i++)
                            {
                                superimposed += Part_img[i];
                            }

                            Cv2.Threshold(superimposed, superimposed, 5, 255, ThresholdType.ToZero);// parameter needs to be reviewed

                            superimposed.SubMat(0
                                                , 440
                                                , 0
                                                , 512).CopyTo(superimposed);    // crop to the size and position of the first image
                        }



                        if (delta_xx >= 0 && delta_yy < 0)// shift toward the upper right of the image
                        {
                            for (int i = 0; i < two_set.Count; i++)
                            {
                                if (i == 0 || i == 1)
                                {
                                    Part_img[i][
                                        0 + 3
                                        , 440 + 3
                                        , 0
                                        , 512
                                    ] = two_set[i];     // put the processed image into the corresponding part of Part
                                }
                                else if (i == 2 || i == 3)
                                {
                                    Part_img[i][
                                        0 + 3 - 1                  // y start
                                        , 440 + 3 - 1              // y end
                                        , 0 + Math.Abs(delta_xx)   // x start
                                        , 512 + Math.Abs(delta_xx) // x end
                                    ] = two_set[i];                // put the processed image into the corresponding part of Part
                                }
                                else if (i == 4 || i == 5)
                                {
                                    Part_img[i][
                                        0 + 3 - 2                      // y start
                                        , 440 + 3 - 2                  // y end
                                        , 0 + 2 * Math.Abs(delta_xx)   // x start
                                        , 512 + 2 * Math.Abs(delta_xx) // x end
                                    ] = two_set[i];                    // put the processed image into the corresponding part of Part
                                }
                                else if (i == 6 || i == 7)
                                {
                                    Part_img[i][
                                        0 + 3 - 3                      // y start
                                        , 440 + 3 - 3                  // y end
                                        , 0 + 3 * Math.Abs(delta_xx)   // x start
                                        , 512 + 3 * Math.Abs(delta_xx) // x end
                                    ] = two_set[i];                    // put the processed image into the corresponding part of Part
                                }
                            }
                            for (int i = 0; i < Part_img.Count(); i++)
                            {
                                superimposed += Part_img[i];
                            }

                            Cv2.Threshold(superimposed, superimposed, 5, 255, ThresholdType.ToZero);// parameter needs to be reviewed

                            superimposed.SubMat(0 + 3
                                                , 440 + 3
                                                , 0
                                                , 512).CopyTo(superimposed);    // crop to the size and position of the first image
                        }



                        if (delta_xx < 0 && delta_yy < 0)// shift toward the upper left of the image
                        {
                            for (int i = 0; i < two_set.Count; i++)
                            {
                                if (i == 0 || i == 1)
                                {
                                    Part_img[i][
                                        0 + 3
                                        , 440 + 3
                                        , 0 + 3
                                        , 512 + 3
                                    ] = two_set[i];     // put the processed image into the corresponding part of Part
                                }
                                else if (i == 2 || i == 3)
                                {
                                    Part_img[i][
                                        0 + 3 - 1       // y start
                                        , 440 + 3 - 1   // y end
                                        , 0 + 3 - 1     // x start
                                        , 512 + 3 - 1   // x end
                                    ] = two_set[i];     // put the processed image into the corresponding part of Part
                                }
                                else if (i == 4 || i == 5)
                                {
                                    Part_img[i][
                                        0 + 3 - 2       // y start
                                        , 440 + 3 - 2   // y end
                                        , 0 + 3 - 2     // x start
                                        , 512 + 3 - 2   // x end
                                    ] = two_set[i];     // put the processed image into the corresponding part of Part
                                }
                                else if (i == 6 || i == 7)
                                {
                                    Part_img[i][
                                        0 + 3 - 3           // y start
                                        , 440 + 3 - 3       // y end
                                        , 0 + 3 - 3         // x start
                                        , 512 + 3 - 3       // x end
                                    ] = two_set[i];         // put the processed image into the corresponding part of Part
                                }
                            }
                            for (int i = 0; i < Part_img.Count(); i++)
                            {
                                superimposed += Part_img[i];
                            }

                            Cv2.Threshold(superimposed, superimposed, 5, 255, ThresholdType.ToZero);// parameter needs to be reviewed

                            superimposed.SubMat(0 + 3
                                                , 440 + 3
                                                , 0 + 3
                                                , 512 + 3).CopyTo(superimposed);    // crop to the size and position of the first image
                        }


                        if (delta_xx < 0 && delta_yy >= 0)// shift toward the lower left of the image
                        {
                            for (int i = 0; i < two_set.Count; i++)
                            {
                                if (i == 0 || i == 1)
                                {
                                    Part_img[i][
                                        0
                                        , 440
                                        , 0 + 3
                                        , 512 + 3
                                    ] = two_set[i];     // put the processed image into the corresponding part of Part
                                }
                                else if (i == 2 || i == 3)
                                {
                                    Part_img[i][
                                        0 + Math.Abs(delta_yy)     // y start
                                        , 440 + Math.Abs(delta_yy) // y end
                                        , 0 + 3 - 1                // x start
                                        , 512 + 3 - 1              // x end
                                    ] = two_set[i];                // put the processed image into the corresponding part of Part
                                }
                                else if (i == 4 || i == 5)
                                {
                                    Part_img[i][
                                        0 + 2 * Math.Abs(delta_yy)     // y start
                                        , 440 + 2 * Math.Abs(delta_yy) // y end
                                        , 0 + 3 - 2                    // x start
                                        , 512 + 3 - 2                  // x end
                                    ] = two_set[i];                    // put the processed image into the corresponding part of Part
                                }
                                else if (i == 6 || i == 7)
                                {
                                    Part_img[i][
                                        0 + 3 * Math.Abs(delta_yy)     // y start
                                        , 440 + 3 * Math.Abs(delta_yy) // y end
                                        , 0 + 3 - 3                    // x start
                                        , 512 + 3 - 3                  // x end
                                    ] = two_set[i];                    // put the processed image into the corresponding part of Part
                                }
                            }
                            for (int i = 0; i < Part_img.Count(); i++)
                            {
                                superimposed += Part_img[i];
                            }

                            Cv2.Threshold(superimposed, superimposed, 5, 255, ThresholdType.ToZero);// parameter needs to be reviewed

                            superimposed.SubMat(0
                                                , 440
                                                , 0 + 3
                                                , 512 + 3).CopyTo(superimposed);    // crop to the size and position of the first image
                        }

                        Mat one1 = Mat.Ones(y0 - 20, 512, MatType.CV_8UC1);// size of the window opened around the center of the field of view
                        Mat one2 = Mat.Ones(41, x0 - 20, MatType.CV_8UC1);
                        Mat one3 = Mat.Ones(41, 491 - x0, MatType.CV_8UC1);
                        Mat one4 = Mat.Ones(419 - y0, 512, MatType.CV_8UC1);

                        superimposed[0, y0 - 20, 0, 512]             = one1 * 0;
                        superimposed[y0 - 20, y0 + 21, 0, x0 - 20]   = one2 * 0;
                        superimposed[y0 - 20, y0 + 21, x0 + 21, 512] = one3 * 0;
                        superimposed[y0 + 21, 440, 0, 512]           = one4 * 0;// black out everything except the square of ○ μm around the center

                        superimposed.ImWrite("C:\\set\\superimposed25_1.bmp");

                        using (CvMemStorage storage = new CvMemStorage())
                        {
                            using (CvContourScanner scanner = new CvContourScanner(superimposed.ToIplImage(), storage, CvContour.SizeOf, ContourRetrieval.Tree, ContourChain.ApproxSimple))
                            {
                                foreach (CvSeq <CvPoint> c in scanner)
                                {
                                    CvMoments mom = new CvMoments(c, false);
                                    if (c.ElemSize < 2)
                                    {
                                        continue;
                                    }
                                    if (mom.M00 == 0.0)
                                    {
                                        continue;
                                    }
                                    double mx   = mom.M10 / mom.M00;
                                    double my   = mom.M01 / mom.M00;
                                    mm     koko = new mm();
                                    koko.white_x    = mx;
                                    koko.white_y    = my;
                                    koko.white_kido = mom.M00;
                                    koko.white_dx   = delta_xx;
                                    koko.white_dy   = delta_yy;
                                    white_area.Add(koko);
                                    stage.WriteLine(String.Format("mx={0:f2} , my={1:f2} , dx={2:f2} , dy={3:f2} , M={4:f2}", mx, my, delta_xx, delta_yy, mom.M00));
                                }
                            }
                        }
                        Part_img.Clear();
                    } //pixel shift x
                }     //pixel shift y
            }
            if (white_area.Count > 0)
            {
                double center_x  = 0;
                double center_y  = 0;
                double center_dx = 0;
                double center_dy = 0;
                double kido_sum  = 0;
                for (int i = 0; i < white_area.Count; i++)
                {
                    kido_sum  += white_area[i].white_kido;
                    center_x  += white_area[i].white_x * white_area[i].white_kido;
                    center_y  += white_area[i].white_y * white_area[i].white_kido;
                    center_dx += white_area[i].white_dx * white_area[i].white_kido;
                    center_dy += white_area[i].white_dy * white_area[i].white_kido;
                }
                center_x  = center_x / kido_sum;
                center_y  = center_y / kido_sum;
                center_dx = center_dx / kido_sum;
                center_dy = center_dy / kido_sum;

                int c_o_g_x;
                int c_o_g_y;
                if (center_x >= 0)
                {
                    c_o_g_x = (int)(center_x + 0.5);
                }
                else
                {
                    c_o_g_x = (int)(center_x - 0.5);
                }

                if (center_y >= 0)
                {
                    c_o_g_y = (int)(center_y + 0.5);
                }
                else
                {
                    c_o_g_y = (int)(center_y - 0.5);
                }

                int dx_pixel = c_o_g_x - x0;
                int dy_pixel = c_o_g_y - y0;

                double dx_micron = dx_pixel * 0.265625 / 1000;
                double dy_micron = dy_pixel * 0.265625 / 1000;

                double now_x2 = mc.GetPoint().X;
                double now_y2 = mc.GetPoint().Y;
                mc.MovePointXY(now_x2 - dx_micron, now_y2 + dy_micron);// sign chosen from the relation between the pixel axes and the stage axes
                mc.Join();

                over_dx = center_dx;
                over_dy = center_dy;
            }
        }
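The +0.5/-0.5 rounding at the end of Example #8 implements round-half-away-from-zero; an equivalent one-liner with Math.Round is sketched below (helper name invented).

        // Sketch: round-half-away-from-zero, matching the manual +0.5/-0.5 logic above.
        private static int RoundAwayFromZero(double value)
        {
            return (int)Math.Round(value, MidpointRounding.AwayFromZero);
        }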
Example #9
        private void BeamDetection(string outputfilename, bool isup)
        {// beam Detection
            int BinarizeThreshold   = 60;
            int BrightnessThreshold = 4;
            int nop = 7;

            double dz = 0;

            if (isup == true)
            {
                dz = -0.003;
            }
            else
            {
                dz = 0.003;
            }

            Camera         camera    = Camera.GetInstance();
            MotorControler mc        = MotorControler.GetInstance(parameterManager);
            Vector3        InitPoint = mc.GetPoint();
            Vector3        p         = new Vector3();
            TracksManager  tm        = parameterManager.TracksManager;
            int            mod       = parameterManager.ModuleNo;
            int            pl        = parameterManager.PlateNo;
            Track          myTrack   = tm.GetTrack(tm.TrackingIndex);

            string[] sp = myTrack.IdString.Split('-');

            //string datfileName = string.Format("{0}.dat", System.DateTime.Now.ToString("yyyyMMdd_HHmmss"));
            string       datfileName = string.Format(@"c:\test\bpm\{0}\{1}-{2}-{3}-{4}-{5}.dat", mod, mod, pl, sp[0], sp[1], System.DateTime.Now.ToString("ddHHmmss"));
            BinaryWriter writer      = new BinaryWriter(File.Open(datfileName, FileMode.Create));

            byte[] bb = new byte[440 * 512 * nop];

            string       fileName = string.Format("{0}", outputfilename);
            StreamWriter twriter  = File.CreateText(fileName);
            string       stlog    = "";


            List <ImageTaking> LiIT = TakeSequentialImage(0.0, 0.0, dz, nop);

            Mat sum = Mat.Zeros(440, 512, MatType.CV_8UC1);

            for (int i = 0; i < LiIT.Count; i++)
            {
                Mat bin = (Mat)DogContrastBinalize(LiIT[i].img, 31, BinarizeThreshold);
                Cv2.Add(sum, bin, sum);
                //byte[] b = LiIT[i].img.ToBytes();//format is .png
                MatOfByte mob = new MatOfByte(LiIT[i].img);
                byte[]    b   = mob.ToArray();
                b.CopyTo(bb, 440 * 512 * i);
            }

            mc.MovePointZ(InitPoint.Z);
            mc.Join();


            Cv2.Threshold(sum, sum, BrightnessThreshold, 1, ThresholdType.Binary);


            //Using Cv2.FindContours causes an AccessViolationException (in both Release and Debug builds), so this is written in C-API style
            using (CvMemStorage storage = new CvMemStorage())
            {
                using (CvContourScanner scanner = new CvContourScanner(sum.ToIplImage(), storage, CvContour.SizeOf, ContourRetrieval.Tree, ContourChain.ApproxSimple))
                {
                    //string fileName = string.Format(@"c:\img\{0}.txt",
                    //        System.DateTime.Now.ToString("yyyyMMdd_HHmmss_fff"));

                    foreach (CvSeq <CvPoint> c in scanner)
                    {
                        CvMoments mom = new CvMoments(c, false);
                        if (c.ElemSize < 2)
                        {
                            continue;
                        }
                        if (mom.M00 == 0.0)
                        {
                            continue;
                        }
                        double mx = mom.M10 / mom.M00;
                        double my = mom.M01 / mom.M00;
                        stlog += string.Format("{0:F} {1:F}\n", mx, my);
                    }
                }
            }

            twriter.Write(stlog);
            twriter.Close();

            writer.Write(bb);
            writer.Flush();
            writer.Close();

            sum *= 255;
            sum.ImWrite(String.Format(@"c:\img\{0}_{1}_{2}.bmp",
                                      System.DateTime.Now.ToString("yyyyMMdd_HHmmss"),
                                      (int)(p.X * 1000),
                                      (int)(p.Y * 1000)));
        }//BeamDetection
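Examples #6, #8 and #9 all scan contours with CvContourScanner and take centroids from CvMoments. That shared pattern could be pulled into one helper, sketched below with an invented name; only API calls already shown in the examples are used.

        // Sketch: collect the centroids of all contours in a binary image, C-API style as above.
        // Note: as with cvFindContours, scanning may modify the input image.
        private static List<CvPoint2D32f> CollectCentroids(IplImage binary)
        {
            var centroids = new List<CvPoint2D32f>();
            using (CvMemStorage storage = new CvMemStorage())
            using (CvContourScanner scanner = new CvContourScanner(binary, storage, CvContour.SizeOf, ContourRetrieval.Tree, ContourChain.ApproxSimple))
            {
                foreach (CvSeq<CvPoint> c in scanner)
                {
                    CvMoments mom = new CvMoments(c, false);
                    if (c.ElemSize < 2 || mom.M00 == 0.0)
                    {
                        continue;
                    }
                    centroids.Add(new CvPoint2D32f(mom.M10 / mom.M00, mom.M01 / mom.M00));
                }
            }
            return centroids;
        }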