Example #1
    private ShapeClip DetectClip(CvSeq <CvPoint> contour, IplImage image)
    {
        // Approximate the contour with a polygon and take its bounding rectangle.
        CvMemStorage    cstorage = new CvMemStorage();
        CvSeq <CvPoint> verts    = contour.ApproxPoly(CvContour.SizeOf, cstorage, ApproxPolyMethod.DP, contour.ContourPerimeter() * 0.05);
        CvRect          rect     = Cv.BoundingRect(verts);

        // Scale the bounding box to 1.5x its size, keeping it centred.
        CvSize originalSize = rect.Size;
        CvSize size         = new CvSize((int)(rect.Width * 1.5), (int)(rect.Height * 1.5));
        CvSize sizeDist     = new CvSize(rect.Width - size.Width, rect.Height - size.Height);

        rect = new CvRect(
            Math.Max(rect.Location.X + sizeDist.Width / 2, 0),
            Math.Max(rect.Location.Y + sizeDist.Height / 2, 0), size.Width, size.Height);

        // If the approximation has at least 4 vertices and lies inside the image, extract the region of interest and refine the orientation.
        if (verts.Total >= 4 && new CvRect(0, 0, image.Width, image.Height).Contains(rect))
        {
            DetectionState detectionState = verts.Total == 4 ? DetectionState.SemiOriented : DetectionState.Candidate;
            double         angle          = (180.0 / Math.PI) * ComputeOrientationFromVerts(verts.ToArray());

            using (IplImage region = image.Clone(rect))
                using (IplImage finalRegion = image.Clone(rect))
                    using (IplImage colorRegion = new IplImage(region.Size.Width, region.Size.Height, BitDepth.U8, 3))
                        using (IplImage debug = new IplImage(region.Size.Width + 20, region.Size.Height + 20, BitDepth.U8, 3))
                        {
                            // Rotate into position based on the line angle estimate
                            Cv.WarpAffine(region, region, Cv.GetRotationMatrix2D(new CvPoint2D32f(rect.Width / 2, rect.Height / 2), angle, 1));
                            Cv.FloodFill(region, new CvPoint(0, 0), 255, 0, 150);

                            // Project image and find clusters
                            region.Not(region);
                            double[] horizontalProjection, verticalProjection;
                            int[]    horizontalPrjClusters = ComputeClusters(region, true, out horizontalProjection);
                            int      horizontalClusters = horizontalPrjClusters[0], lastHorizontalCluster = horizontalPrjClusters[1];
                            int[]    verticalPrjClusters = ComputeClusters(region, false, out verticalProjection);
                            int      verticalClusters = verticalPrjClusters[0], lastVerticalCluster = verticalPrjClusters[1];

                            // Correct the orientation based on the clusters found
                            bool foundLDRs = false;
                            if (verticalClusters > horizontalClusters)
                            {
                                // More vertical than horizontal clusters: off by 90 or 270 degrees
                                if (lastHorizontalCluster < region.Width / 2)
                                {
                                    // 90 deg
                                    angle    += 90;
                                    foundLDRs = true;
                                }
                                else
                                {
                                    // 270 deg
                                    angle    += 270;
                                    foundLDRs = true;
                                }
                            }
                            else if (verticalClusters < horizontalClusters)
                            {
                                // Fewer vertical than horizontal clusters: off by 0 or 180 degrees
                                if (lastVerticalCluster < region.Height / 2)
                                {
                                    // 0 deg
                                    foundLDRs = true;
                                }
                                else
                                {
                                    // 180 deg
                                    angle    += 180;
                                    foundLDRs = true;
                                }
                            }
                            else
                            {
                                // Equal cluster counts: the initial alignment went wrong -
                                // no proper orientation found (could not identify the LDRs)
                            }

                            #region DEBUG
                            //debug.Zero();
                            //Cv.CvtColor(finalRegion, colorRegion, ColorConversion.GrayToRgb);
                            //debug.DrawImage(20, 0, region.Width, region.Height, colorRegion);

                            //for (int i = 0; i < region.Width / 2; i++)
                            //    debug.DrawRect(20 + i, debug.Height - (int)(horizontalProjection[i] * 100), 20 + i, debug.Height, CvColor.Red, 1);
                            //for (int i = 0; i < region.Height / 2; i++)
                            //    debug.DrawRect(0, i, (int)(verticalProjection[i] * 100), i, CvColor.Red, 1);
                            //debugWindow.ShowImage(debug);
                            #endregion

                            if (foundLDRs)
                            {
                                detectionState = DetectionState.FullyOriented;
                            }
                        }

            // Compute pixel space mapping
            Vec2F scale = new Vec2F(screenResolution.X / image.Width, screenResolution.Y / image.Height);

            return(new ShapeClip(
                       detectionState,
                       new Vec2F(rect.Location.X + 0.5f * rect.Width, rect.Location.Y + 0.5f * rect.Height).Scale(scale),
                       new Vec2F(originalSize).Scale(scale),
                       angle));
        }
        else
        {
            return(null);
        }
    }
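
Note: the bounding-box expansion in DetectClip is easy to misread because sizeDist is negative when the box grows. A minimal standalone sketch of the same arithmetic, assuming only OpenCvSharp's CvRect/CvSize types:

    // Grow a CvRect by a factor around its centre, clamped at the image origin,
    // mirroring the size/sizeDist computation in DetectClip above.
    static CvRect GrowAroundCentre(CvRect rect, double factor)
    {
        CvSize size     = new CvSize((int)(rect.Width * factor), (int)(rect.Height * factor));
        CvSize sizeDist = new CvSize(rect.Width - size.Width, rect.Height - size.Height); // negative when growing
        return new CvRect(
            Math.Max(rect.Location.X + sizeDist.Width / 2, 0),  // shift left by half the added width
            Math.Max(rect.Location.Y + sizeDist.Height / 2, 0), // shift up by half the added height
            size.Width, size.Height);
    }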
Example #2
        /// <summary>
        /// sample of C style wrapper
        /// </summary>
        private void SampleC()
        {
            // cvHoughLines2
            // Detects lines (line segments) with both the standard and the probabilistic
            // Hough transform. The parameter values in this sample are tuned for the example image.

            // (1) Load the images
            using (IplImage srcImgGray = new IplImage(Const.ImageGoryokaku, LoadMode.GrayScale))
                using (IplImage srcImgStd = new IplImage(Const.ImageGoryokaku, LoadMode.Color))
                    using (IplImage srcImgProb = srcImgStd.Clone())
                    {
                        // (2) Preprocessing for the Hough transform
                        Cv.Canny(srcImgGray, srcImgGray, 50, 200, ApertureSize.Size3);
                        using (CvMemStorage storage = new CvMemStorage())
                        {
                            // (3) Detect lines with the standard Hough transform and draw them
                            CvSeq lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Standard, 1, Math.PI / 180, 50, 0, 0);
                            // wrapper style
                            //CvLineSegmentPolar[] lines = src_img_gray.HoughLinesStandard(1, Math.PI / 180, 50, 0, 0);

                            int limit = Math.Min(lines.Total, 10);
                            for (int i = 0; i < limit; i++)
                            {
                                // native code style

                                /*
                                 * unsafe
                                 * {
                                 *  float* line = (float*)lines.GetElem<IntPtr>(i).Value.ToPointer();
                                 *  float rho = line[0];
                                 *  float theta = line[1];
                                 * }
                                 * //*/

                                // wrapper style
                                CvLineSegmentPolar elem = lines.GetSeqElem <CvLineSegmentPolar>(i).Value;
                                float rho   = elem.Rho;
                                float theta = elem.Theta;

                                double  a   = Math.Cos(theta);
                                double  b   = Math.Sin(theta);
                                double  x0  = a * rho;
                                double  y0  = b * rho;
                                CvPoint pt1 = new CvPoint {
                                    X = Cv.Round(x0 + 1000 * (-b)), Y = Cv.Round(y0 + 1000 * (a))
                                };
                                CvPoint pt2 = new CvPoint {
                                    X = Cv.Round(x0 - 1000 * (-b)), Y = Cv.Round(y0 - 1000 * (a))
                                };
                                srcImgStd.Line(pt1, pt2, CvColor.Red, 3, LineType.AntiAlias, 0);
                            }

                            // (4) Detect line segments with the probabilistic Hough transform and draw them
                            lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 50, 50, 10);
                            // wrapper style
                            //CvLineSegmentPoint[] lines = src_img_gray.HoughLinesProbabilistic(1, Math.PI / 180, 50, 0, 0);

                            for (int i = 0; i < lines.Total; i++)
                            {
                                // native code style

                                /*
                                 * unsafe
                                 * {
                                 *  CvPoint* point = (CvPoint*)lines.GetElem<IntPtr>(i).Value.ToPointer();
                                 *  src_img_prob.Line(point[0], point[1], CvColor.Red, 3, LineType.AntiAlias, 0);
                                 * }
                                 * //*/

                                // wrapper style
                                CvLineSegmentPoint elem = lines.GetSeqElem <CvLineSegmentPoint>(i).Value;
                                srcImgProb.Line(elem.P1, elem.P2, CvColor.Red, 3, LineType.AntiAlias, 0);
                            }
                        }

                        // (5) Create windows and show the detection results
                        using (new CvWindow("Hough_line_standard", WindowMode.AutoSize, srcImgStd))
                            using (new CvWindow("Hough_line_probabilistic", WindowMode.AutoSize, srcImgProb))
                            {
                                CvWindow.WaitKey(0);
                            }
                    }
        }
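
Note: the pt1/pt2 computation above converts a line from (rho, theta) polar form into two distant points so it can be drawn; (x0, y0) is the point on the line closest to the origin, and (-sin theta, cos theta) is the line direction. A standalone sketch of that step (the 1000-pixel half-length is the same arbitrary constant the sample uses):

    // Convert a polar-form line (rho, theta) into two drawable endpoints.
    static void PolarToPoints(float rho, float theta, out CvPoint pt1, out CvPoint pt2)
    {
        double a = Math.Cos(theta), b = Math.Sin(theta);
        double x0 = a * rho, y0 = b * rho; // point on the line closest to the origin
        pt1 = new CvPoint(Cv.Round(x0 + 1000 * (-b)), Cv.Round(y0 + 1000 * a));
        pt2 = new CvPoint(Cv.Round(x0 - 1000 * (-b)), Cv.Round(y0 - 1000 * a));
    }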
Example #3
    // Update is called once per frame
    void Update()
    {
        // Grab one frame from the webcam
        _Frame = Cv.QueryFrame(_Capture);
        // Copy the webcam image for use as the STEP1 input
        IplImage img = _Frame.Clone();
        // Variables for STEP1
        IplImage smoothed = new IplImage(img.Size, BitDepth.U8, 3);
        // Variables for STEP2
        IplImage hsv = new IplImage(img.Size, BitDepth.U8, 3);
        // Variables for STEP3
        IplImage     segmented = new IplImage(img.Size, BitDepth.U8, 1);
        CvMemStorage storage   = new CvMemStorage();
        // Variables for STEP4
        IplImage imgTmp     = new IplImage(img.Size, BitDepth.U8, 1);
        IplImage morphology = new IplImage(img.Size, BitDepth.U8, 1);
        // Variables for STEP5
        CvSeq <CvPoint> contours;
        IplImage        detected = _Frame.Clone();

        // STEP1: Noise removal
        Cv.Smooth(img, smoothed, SmoothType.Blur, 1);
        //Cv.ShowImage("window",smoothed);
        //Cv.Smooth(smoothed, smoothed, SmoothType.Gaussian,1);

        // STEP2: Convert the color space from RGB to HSV
        Cv.CvtColor(smoothed, hsv, ColorConversion.BgrToHsv);
        //Cv.ShowImage("window",hsv);
        // STEP3: Segmentation
        storage.Clear();
        Cv.InRangeS(hsv,
                    new CvScalar((pointhsv.Val0) - RANGE_H,
                                 (pointhsv.Val1) - RANGE_S,
                                 (pointhsv.Val2) - RANGE_V),
                    new CvScalar((pointhsv.Val0) + RANGE_H,
                                 (pointhsv.Val1) + RANGE_S,
                                 (pointhsv.Val2) + RANGE_V),
                    segmented);
        //Cv.ShowImage("window",segmented);
        // STEP4: Noise removal (morphological close, then open)

        Cv.Dilate(segmented, imgTmp);
        Cv.Erode(imgTmp, imgTmp);

        Cv.Erode(imgTmp, imgTmp);
        Cv.Dilate(imgTmp, morphology);

        //Cv.ShowImage("window",morphology);

        // STEP5: Circle detection
        Cv.FindContours(morphology, storage, out contours,
                        CvContour.SizeOf, ContourRetrieval.Tree,
                        ContourChain.ApproxNone);

        if (contours == null)
        {
            Debug.Log("PSMove is not detected");
        }
        else
        {
            contours = Cv.ApproxPoly(contours, CvContour.SizeOf,
                                     storage, ApproxPolyMethod.DP,
                                     Cv.ContourPerimeter(contours) * CTR_PARAM, true);

            Cv.DrawContours(morphology, contours,
                            new CvScalar(MAX_G, 0, 0), new CvScalar(0, MAX_B, 0), 3, -1);

            Cv.MinEnclosingCircle(contours, out _Center, out _Radius);
            Cv.DrawCircle(morphology, _Center, 2, new CvScalar(0, MAX_B, 0));

            // STEP6: Estimate the 3D position (the images are shown in the windows below)
            Sz = fx * SPHERE_R / _Radius;

            Sx = -((_Center.X - ux) * Sz) / fx;
            Sy = -((_Center.Y - uy) * Sz) / fy;
        }

        _Window.ShowImage(_Frame);
        Cv.ShowImage("Original", img);
        Cv.ShowImage("STEP1:Smoothing", smoothed);
        Cv.ShowImage("STEP2:HSV", hsv);
        Cv.ShowImage("STEP3:Segmentation", segmented);
        Cv.ShowImage("STEP4:Morphology", morphology);
        Cv.ShowImage("STEP5:Detected", detected);
    }
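
Note: STEP4 above is a morphological close (dilate, then erode) followed by an open (erode, then dilate): the close fills small holes in the segmented blob, and the open removes speckle noise. Also note that Update() allocates several IplImages and a CvMemStorage every frame without disposing them, which would leak in longer runs. A minimal sketch of the STEP4 sequence as a reusable helper:

    // Morphological close followed by open, as in STEP4 of Update() above.
    static void CloseThenOpen(IplImage src, IplImage dst, IplImage tmp)
    {
        Cv.Dilate(src, tmp); // close: dilate...
        Cv.Erode(tmp, tmp);  // ...then erode (fills small holes)
        Cv.Erode(tmp, tmp);  // open: erode...
        Cv.Dilate(tmp, dst); // ...then dilate (removes small specks)
    }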
Example #4
        static CvPoint[] FindSquares4(IplImage img, CvMemStorage storage)
        {
            const int N = 11;

            CvSize   sz   = new CvSize(img.Width & -2, img.Height & -2);
            IplImage timg = img.Clone(); // make a copy of input image
            IplImage gray = new IplImage(sz, BitDepth.U8, 1);
            IplImage pyr  = new IplImage(sz.Width / 2, sz.Height / 2, BitDepth.U8, 3);
            // create empty sequence that will contain points -
            // 4 points per square (the square's vertices)
            CvSeq <CvPoint> squares = new CvSeq <CvPoint>(SeqType.Zero, CvSeq.SizeOf, storage);

            // select the maximum ROI in the image
            // with the width and height divisible by 2
            timg.ROI = new CvRect(0, 0, sz.Width, sz.Height);

            // down-scale and upscale the image to filter out the noise
            Cv.PyrDown(timg, pyr, CvFilter.Gaussian5x5);
            Cv.PyrUp(pyr, timg, CvFilter.Gaussian5x5);
            IplImage tgray = new IplImage(sz, BitDepth.U8, 1);

            // find squares in every color plane of the image
            for (int c = 0; c < 3; c++)
            {
                // extract the c-th color plane
                timg.COI = c + 1;
                Cv.Copy(timg, tgray, null);

                // try several threshold levels
                for (int l = 0; l < N; l++)
                {
                    // hack: use Canny instead of zero threshold level.
                    // Canny helps to catch squares with gradient shading
                    if (l == 0)
                    {
                        // apply Canny. Take the upper threshold from slider
                        // and set the lower to 0 (which forces edges merging)
                        Cv.Canny(tgray, gray, 0, Thresh, ApertureSize.Size5);
                        // dilate canny output to remove potential
                        // holes between edge segments
                        Cv.Dilate(gray, gray, null, 1);
                    }
                    else
                    {
                        // apply threshold if l != 0:
                        //     gray(x,y) = tgray(x,y) >= (l+1)*255/N ? 255 : 0
                        Cv.Threshold(tgray, gray, (l + 1) * 255.0 / N, 255, ThresholdType.Binary);
                    }

                    // find contours and store them all as a list
                    CvSeq <CvPoint> contours;
                    Cv.FindContours(gray, storage, out contours, CvContour.SizeOf, ContourRetrieval.List, ContourChain.ApproxSimple, new CvPoint(0, 0));

                    // test each contour
                    while (contours != null)
                    {
                        // approximate contour with accuracy proportional
                        // to the contour perimeter
                        CvSeq <CvPoint> result = Cv.ApproxPoly(contours, CvContour.SizeOf, storage, ApproxPolyMethod.DP, contours.ContourPerimeter() * 0.02, false);
                        // square contours should have 4 vertices after approximation
                        // relatively large area (to filter out noisy contours)
                        // and be convex.
                        // Note: absolute value of an area is used because
                        // area may be positive or negative - in accordance with the
                        // contour orientation
                        if (result.Total == 4 && Math.Abs(result.ContourArea(CvSlice.WholeSeq)) > 1000 && result.CheckContourConvexity())
                        {
                            double s = 0;

                            for (int i = 0; i < 5; i++)
                            {
                                // find the maximum cosine of the angle between
                                // joint edges (smallest angle = largest cosine)
                                if (i >= 2)
                                {
                                    double t = Math.Abs(Angle(result[i].Value, result[i - 2].Value, result[i - 1].Value));
                                    s = s > t ? s : t;
                                }
                            }

                            // if the cosines of all angles are small
                            // (all angles are ~90 degrees) then write the quadrangle
                            // vertices to the resultant sequence
                            if (s < 0.3)
                            {
                                for (int i = 0; i < 4; i++)
                                {
                                    //Console.WriteLine(result[i]);
                                    squares.Push(result[i].Value);
                                }
                            }
                        }

                        // take the next contour
                        contours = contours.HNext;
                    }
                }
            }

            // release all the temporary images
            gray.Dispose();
            pyr.Dispose();
            tgray.Dispose();
            timg.Dispose();

            return(squares.ToArray());
        }
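
Note: FindSquares4 relies on an Angle helper that is not shown in this sample. In the classic OpenCV squares demo, the equivalent function returns the cosine of the angle between the two edges meeting at pt0; a plausible C# version (an assumption, since the original body is not included here):

    // Cosine of the angle between vectors pt0->pt1 and pt0->pt2.
    // The small epsilon guards against division by zero for degenerate points.
    static double Angle(CvPoint pt1, CvPoint pt2, CvPoint pt0)
    {
        double dx1 = pt1.X - pt0.X, dy1 = pt1.Y - pt0.Y;
        double dx2 = pt2.X - pt0.X, dy2 = pt2.Y - pt0.Y;
        return (dx1 * dx2 + dy1 * dy2) /
               Math.Sqrt((dx1 * dx1 + dy1 * dy1) * (dx2 * dx2 + dy2 * dy2) + 1e-10);
    }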
Example #5
        /// <summary>
        /// sample of C style wrapper
        /// </summary>
        private void SampleC()
        {
            // cvHoughLines2

            using (IplImage srcImgGray = new IplImage(FilePath.Image.Goryokaku, LoadMode.GrayScale))
                using (IplImage srcImgStd = new IplImage(FilePath.Image.Goryokaku, LoadMode.Color))
                    using (IplImage srcImgProb = srcImgStd.Clone())
                    {
                        Cv.Canny(srcImgGray, srcImgGray, 50, 200, ApertureSize.Size3);
                        using (CvMemStorage storage = new CvMemStorage())
                        {
                            // Standard algorithm
                            CvSeq lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Standard, 1, Math.PI / 180, 50, 0, 0);
                            // wrapper style
                            //CvLineSegmentPolar[] lines = src_img_gray.HoughLinesStandard(1, Math.PI / 180, 50, 0, 0);

                            int limit = Math.Min(lines.Total, 10);
                            for (int i = 0; i < limit; i++)
                            {
                                // native code style

                                /*
                                 * unsafe
                                 * {
                                 *  float* line = (float*)lines.GetElem<IntPtr>(i).Value.ToPointer();
                                 *  float rho = line[0];
                                 *  float theta = line[1];
                                 * }
                                 * //*/

                                // wrapper style
                                CvLineSegmentPolar elem = lines.GetSeqElem <CvLineSegmentPolar>(i).Value;
                                float rho   = elem.Rho;
                                float theta = elem.Theta;

                                double  a   = Math.Cos(theta);
                                double  b   = Math.Sin(theta);
                                double  x0  = a * rho;
                                double  y0  = b * rho;
                                CvPoint pt1 = new CvPoint {
                                    X = Cv.Round(x0 + 1000 * (-b)), Y = Cv.Round(y0 + 1000 * (a))
                                };
                                CvPoint pt2 = new CvPoint {
                                    X = Cv.Round(x0 - 1000 * (-b)), Y = Cv.Round(y0 - 1000 * (a))
                                };
                                srcImgStd.Line(pt1, pt2, CvColor.Red, 3, LineType.AntiAlias, 0);
                            }

                            // Probabilistic algorithm
                            lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Probabilistic, 1, Math.PI / 180, 50, 50, 10);
                            // wrapper style
                            //CvLineSegmentPoint[] lines = src_img_gray.HoughLinesProbabilistic(1, Math.PI / 180, 50, 0, 0);

                            for (int i = 0; i < lines.Total; i++)
                            {
                                // native code style

                                /*
                                 * unsafe
                                 * {
                                 *  CvPoint* point = (CvPoint*)lines.GetElem<IntPtr>(i).Value.ToPointer();
                                 *  src_img_prob.Line(point[0], point[1], CvColor.Red, 3, LineType.AntiAlias, 0);
                                 * }
                                 * //*/

                                // wrapper style
                                CvLineSegmentPoint elem = lines.GetSeqElem <CvLineSegmentPoint>(i).Value;
                                srcImgProb.Line(elem.P1, elem.P2, CvColor.Red, 3, LineType.AntiAlias, 0);
                            }
                        }

                        using (new CvWindow("Hough_line_standard", WindowMode.AutoSize, srcImgStd))
                            using (new CvWindow("Hough_line_probabilistic", WindowMode.AutoSize, srcImgProb))
                            {
                                CvWindow.WaitKey(0);
                            }
                    }
        }
Example #6
        /// <summary>
        /// Converts an OpenCV IplImage to System.Drawing.Bitmap
        /// </summary>
        /// <param name="src">IplImage to convert</param>
        /// <param name="dst">Destination System.Drawing.Bitmap; its size must match the ROI of src</param>
        /// <remarks>Author: shimat, Gummo (ROI support)</remarks>
        public static unsafe void ToBitmap(this IplImage src, Bitmap dst)
        {
            if (src == null)
            {
                throw new ArgumentNullException("src");
            }
            if (dst == null)
            {
                throw new ArgumentNullException("dst");
            }
            if (src.IsDisposed)
            {
                throw new ArgumentException("The image is disposed.", "src");
            }
            //if (src.Depth != BitDepth.U8)
            //    throw new ArgumentOutOfRangeException("src");
            if (src.ROI.Width != dst.Width || src.ROI.Height != dst.Height)
            {
                throw new ArgumentException("The size of dst must match the ROI size of src.");
            }

            PixelFormat pf = dst.PixelFormat;

            // For single-channel images, generate a grayscale palette
            if (pf == PixelFormat.Format8bppIndexed)
            {
                ColorPalette plt = dst.Palette;
                for (int x = 0; x < 256; x++)
                {
                    plt.Entries[x] = Color.FromArgb(x, x, x);
                }
                dst.Palette = plt;
            }

            // Scale the image if its bit depth is not BitDepth.U8
            IplImage _src;

            if (src.Depth != BitDepth.U8)
            {
                _src = new IplImage(src.Size, BitDepth.U8, src.NChannels);
                using (IplImage f = src.Clone())
                {
                    if (src.Depth == BitDepth.F32 || src.Depth == BitDepth.F64)
                    {
                        Cv.Normalize(src, f, 255, 0, NormType.MinMax);
                    }
                    Cv.ConvertScaleAbs(f, _src);
                }
            }
            else
            {
                _src = src;
            }
            Bitmap _dst = dst;

            int        w    = _src.ROI.Width;
            int        h    = _src.ROI.Height;
            Rectangle  rect = new Rectangle(0, 0, w, h);
            BitmapData bd   = null;

            try
            {
                bd = _dst.LockBits(rect, ImageLockMode.WriteOnly, pf);

                byte *psrc         = (byte *)(_src.ImageData.ToPointer());
                byte *pdst         = (byte *)(bd.Scan0.ToPointer());
                int   xo           = _src.ROI.X;
                int   yo           = _src.ROI.Y;
                int   widthStepSrc = _src.WidthStep;
                int   widthStepDst = ((_src.ROI.Width * _src.NChannels) + 3) / 4 * 4; // round up to a multiple of 4
                int   stride       = bd.Stride;
                int   ch           = _src.NChannels;

                switch (pf)
                {
                case PixelFormat.Format1bppIndexed:
                {
                    // The destination packs 8 pixels per byte (1bpp) while the
                    // IplImage stores one byte per pixel, so repack the bits by hand
                    //int offset = stride - (w / 8);
                    int  x = xo;
                    int  y;
                    int  bytePos;
                    byte mask;
                    byte b = 0;
                    int  i;
                    for (y = yo; y < h; y++)
                    {
                        for (bytePos = 0; bytePos < stride; bytePos++)
                        {
                            if (x < w)
                            {
                                for (i = 0; i < 8; i++)
                                {
                                    mask = (byte)(0x80 >> i);
                                    if (x < w && psrc[widthStepSrc * y + x] == 0)
                                    {
                                        b &= (byte)(mask ^ 0xff);
                                    }
                                    else
                                    {
                                        b |= mask;
                                    }

                                    x++;
                                }
                                pdst[bytePos] = b;
                            }
                        }
                        x     = xo;
                        pdst += stride;
                    }
                    break;
                }

                case PixelFormat.Format8bppIndexed:
                case PixelFormat.Format24bppRgb:
                case PixelFormat.Format32bppArgb:
                    if (widthStepSrc == widthStepDst && _src.ROI.Size == _src.Size)
                    {
                        Util.CopyMemory(pdst, psrc, _src.ImageSize);
                    }
                    else
                    {
                        for (int y = 0; y < h; y++)
                        {
                            int offsetSrc = ((y + yo) * widthStepSrc) + xo;
                            int offsetDst = (y * widthStepDst);

                            /*
                             * for (int x = 0; x < _src.ROI.Width; x++)
                             * {
                             *  pdst[x + offset_dst] = psrc[x + offset_src];
                             * }
                             * //*/
                            // copy one row at a time
                            Util.CopyMemory(pdst + offsetDst, psrc + offsetSrc, w * ch);
                        }
                    }
                    break;

                default:
                    throw new NotImplementedException();
                }
            }
            finally
            {
                _dst.UnlockBits(bd);
            }

            // If the image origin is bottom-left, flip vertically to compensate
            if (src.Origin == ImageOrigin.BottomLeft)
            {
                _dst.RotateFlip(RotateFlipType.RotateNoneFlipY);
            }

            // Dispose the extra instance created for scaling
            if (_src != src)
            {
                _src.Dispose();
            }
        }
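
Note: a hypothetical usage sketch for the extension method above (the file names are placeholders). The destination Bitmap must already match the ROI size of the source and use a compatible pixel format:

    using (var src = new IplImage("input.png", LoadMode.Color))
    {
        var dst = new Bitmap(src.Width, src.Height, PixelFormat.Format24bppRgb);
        src.ToBitmap(dst);   // copies the pixel data, honouring src.ROI
        dst.Save("output.png");
    }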
Example #7
        private void RenderThread()
        {
            Stopwatch sw    = new Stopwatch();
            int       count = 0;

            sw.Start();

            while (true)
            {
                if (_src == null)
                {
                    Thread.Sleep(10);
                    continue;
                }

                IplImage src = null;

                lock (lockobj)
                {
                    src = _src.Clone();
                    _src.Dispose();
                    _src = null;
                }

                if (_isFirst)
                {
                    _isFirst = false;

                    Dispatcher.Invoke(new Action(delegate
                    {
                        Slider_CenterX.Minimum = 0;
                        Slider_CenterX.Maximum = src.Width;
                        Slider_CenterX.Value   = src.Width / 2;

                        Slider_CenterY.Minimum = 0;
                        Slider_CenterY.Maximum = src.Height;
                        Slider_CenterY.Value   = src.Height / 2;

                        Slider_R2.Minimum = 1;
                        Slider_R2.Maximum = Math.Min(src.Width, src.Height) / 2;
                        Slider_R2.Value   = Math.Min(src.Width, src.Height) / 2;

                        Slider_R1.Minimum = 0;
                        Slider_R1.Maximum = Math.Min(src.Width, src.Height) / 2 - 1;
                        Slider_R1.Value   = Math.Min(src.Width, src.Height) / 4;
                    }));
                }

                Dispatcher.Invoke(new Action(delegate
                {
                    _camera.Center   = new OpenCvSharp.CPlusPlus.Point(Slider_CenterX.Value, Slider_CenterY.Value);
                    _camera.R1       = (float)Slider_R1.Value;
                    _camera.R2       = (float)Slider_R2.Value;
                    _camera.OffsetTh = (float)(Slider_OffsetTH.Value / 180.0 * Math.PI);
                }));

                _camera.Undistortion(src, out IplImage dst, _panoramaImageWidth, out IplImage guide, _originalImageWidth);

                MemoryStream msx = new MemoryStream();
                dst.ToStream(msx, ".bmp");
                byte[] imageData = ImageCompleted(msx);

                Dispatcher.BeginInvoke(new Action(delegate
                {
                    Grid_SettingArea.IsEnabled = true;

                    try
                    {
                        MemoryStream ms = new MemoryStream();
                        guide.ToStream(ms, ".bmp");
                        BitmapImage bitmap = new BitmapImage();
                        bitmap.BeginInit();
                        bitmap.StreamSource = ms;
                        bitmap.EndInit();

                        Image_Original.Source = bitmap;
                    }
                    catch { }

                    try
                    {
                        MemoryStream ms2   = new MemoryStream(imageData);
                        BitmapImage bitmap = new BitmapImage();
                        bitmap.BeginInit();
                        bitmap.StreamSource = ms2;
                        bitmap.EndInit();

                        Image_Panorama.Source = bitmap;
                    }
                    catch { }

                    //try
                    //{
                    //    MemoryStream ms = new MemoryStream();
                    //    dst.ToStream(ms, ".bmp");
                    //    BitmapImage bitmap = new BitmapImage();
                    //    bitmap.BeginInit();
                    //    bitmap.StreamSource = ms;
                    //    bitmap.EndInit();

                    //    Image_Panorama.Source = bitmap;
                    //}
                    //catch { }

                    src.Dispose();
                    guide.Dispose();
                    dst.Dispose();

                    count++;
                    if (sw.ElapsedMilliseconds >= 1000)
                    {
                        sw.Stop();
                        _outputFPS = count * (1000.0 / sw.ElapsedMilliseconds);
                        count      = 0;
                        sw.Restart();
                    }

                    Label_InputFPS.Content  = _inputFPS.ToString("F2");
                    Label_OutputFPS.Content = _outputFPS.ToString("F2");
                }));
            }
        }
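
Note: the BitmapImage construction above works because the MemoryStream is never disposed; WPF decodes StreamSource lazily by default. A common variant (an assumption, not part of the original sample) decodes eagerly and freezes the bitmap, so the stream can be released and the image safely shared across threads:

    var bitmap = new BitmapImage();
    bitmap.BeginInit();
    bitmap.CacheOption  = BitmapCacheOption.OnLoad; // decode now rather than lazily
    bitmap.StreamSource = ms;
    bitmap.EndInit();
    bitmap.Freeze();                                // safe to hand to other threads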
Example #8
        public Inpaint()
        {
            // cvInpaint

            Console.WriteLine(
                "Hot keys: \n" +
                "\tESC - quit the program\n" +
                "\tr - restore the original image\n" +
                "\ti or ENTER - run inpainting algorithm\n" +
                "\t\t(before running it, paint something on the image)\n" +
                "\ts - save the original image, mask image, original+mask image and inpainted image to desktop."
                );

            using (IplImage img0 = new IplImage(FilePath.Image.Fruits, LoadMode.AnyDepth | LoadMode.AnyColor))
            {
                using (IplImage img = img0.Clone())
                    using (IplImage inpaintMask = new IplImage(img0.Size, BitDepth.U8, 1))
                        using (IplImage inpainted = img0.Clone())
                        {
                            inpainted.Zero();
                            inpaintMask.Zero();

                            using (CvWindow wImage = new CvWindow("image", WindowMode.AutoSize, img))
                            {
                                CvPoint prevPt = new CvPoint(-1, -1);
                                wImage.OnMouseCallback += delegate(MouseEvent ev, int x, int y, MouseEvent flags)
                                {
                                    if (ev == MouseEvent.LButtonUp || (flags & MouseEvent.FlagLButton) == 0)
                                    {
                                        prevPt = new CvPoint(-1, -1);
                                    }
                                    else if (ev == MouseEvent.LButtonDown)
                                    {
                                        prevPt = new CvPoint(x, y);
                                    }
                                    else if (ev == MouseEvent.MouseMove && (flags & MouseEvent.FlagLButton) != 0)
                                    {
                                        CvPoint pt = new CvPoint(x, y);
                                        if (prevPt.X < 0)
                                        {
                                            prevPt = pt;
                                        }
                                        inpaintMask.Line(prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                                        img.Line(prevPt, pt, CvColor.White, 5, LineType.AntiAlias, 0);
                                        prevPt = pt;
                                        wImage.ShowImage(img);
                                    }
                                };

                                for (; ;)
                                {
                                    switch ((char)CvWindow.WaitKey(0))
                                    {
                                    case (char)27: // exit
                                        CvWindow.DestroyAllWindows();
                                        return;

                                    case 'r': // restore original image
                                        inpaintMask.Zero();
                                        img0.Copy(img);
                                        wImage.ShowImage(img);
                                        break;

                                    case 'i': // do Inpaint
                                    case '\r':
                                        CvWindow wInpaint = new CvWindow("inpainted image", WindowMode.AutoSize);
                                        img.Inpaint(inpaintMask, inpainted, 3, InpaintMethod.Telea);
                                        wInpaint.ShowImage(inpainted);
                                        break;

                                    case 's': // save images
                                        string desktop = Environment.GetFolderPath(Environment.SpecialFolder.Desktop);
                                        img0.SaveImage(Path.Combine(desktop, "original.png"));
                                        inpaintMask.SaveImage(Path.Combine(desktop, "mask.png"));
                                        img.SaveImage(Path.Combine(desktop, "original+mask.png"));
                                        inpainted.SaveImage(Path.Combine(desktop, "inpainted.png"));
                                        break;
                                    }
                                }
                            }
                        }
            }
        }
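
Note: a minimal non-interactive sketch of the same inpainting call, with hypothetical file names; white pixels in the mask mark the damaged region to fill:

    using (var src  = new IplImage("damaged.png", LoadMode.Color))
    using (var mask = new IplImage("mask.png", LoadMode.GrayScale))
    using (var dst  = src.Clone())
    {
        src.Inpaint(mask, dst, 3, InpaintMethod.Telea); // 3 = inpainting radius
        dst.SaveImage("restored.png");
    }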
Example #9
        public IplImage ImageTreatment(IplImage img, out IplImage[] plateImages)
        {
            IplImage        tgray        = null;
            IplImage        gray         = null;
            IplImage        mainSubImage = null;
            IplImage        tmpImage     = null;
            IplImage        tmpImage2    = null;
            CvBlobs         blobs1       = null;
            CvBlobs         blobs2       = null;
            List <IplImage> plates       = null;
            CvRect          subImageRect;

            plateImages = null;

            try
            {
                plates       = new List <IplImage>();
                mainSubImage = ExtractSubImage(img, out subImageRect);
                tgray        = new IplImage(mainSubImage.Size, BitDepth.U8, 1);
                mainSubImage.CvtColor(tgray, ColorConversion.RgbaToGray);
                blobs1 = PreProcessImage1(mainSubImage, tgray);
                blobs2 = PreProcessImage2(mainSubImage, tgray);

                tmpImage  = img.Clone();
                tmpImage2 = mainSubImage.Clone();
                tmpImage.SetROI(subImageRect);
                if (null != blobs1 && blobs1.Count > 0)
                {
                    IplImage[] plateImage = GetPlates(tmpImage, tmpImage2, blobs1, 2.4);
                    if (null != plateImage)
                    {
                        plates.AddRange(plateImage);
                    }
                }

                if (null != blobs2 && blobs2.Count > 0)
                {
                    IplImage[] plateImage = GetPlates(tmpImage, tmpImage2, blobs2, 3.5);
                    if (null != plateImage)
                    {
                        plates.AddRange(plateImage);
                    }
                }
                tmpImage.ResetROI();

                // gray is still null at this point, so there is nothing to release
                gray = tmpImage;
            }
            finally
            {
                if (null != tmpImage2)
                {
                    Cv.ReleaseImage(tmpImage2);
                }

                if (null != tgray)
                {
                    Cv.ReleaseImage(tgray);
                }

                if (null != mainSubImage)
                {
                    Cv.ReleaseImage(mainSubImage);
                }
            }

            if (plates.Count > 0)
            {
                plateImages = plates.ToArray();
            }

            return(gray);
        }
Example #10
        public void Start()
        {
            if (canExecute)
            {
                return;            // Camera already running: do nothing. (Guards against a forgotten Stop - starting twice causes erratic behaviour.)
            }
            IsActive   = true;
            canExecute = true;
            var             im = new IplImage(); // holds the camera image
            WriteableBitmap buff = new WriteableBitmap(WIDTH, HEIGHT, 96, 96, PixelFormats.Bgr24, null);
            WriteableBitmap grayBuff = new WriteableBitmap(WIDTH, HEIGHT, 96, 96, PixelFormats.Gray8, null);
            IplImage        _mapX, _mapY;
            var             dst = new IplImage();


            Task.Run(() =>
            {
                //Thread.Sleep(1000);

                try
                {
                    cap = Cv.CreateCameraCapture(CameraNumber); // create the camera capture

                    cap.SetCaptureProperty(CaptureProperty.FrameWidth, WIDTH);
                    cap.SetCaptureProperty(CaptureProperty.FrameHeight, HEIGHT);

                    SetWb();

                    var dis = App.Current.Dispatcher;

                    while (canExecute)             // keep showing camera frames until stopped
                    {
                        try
                        {
                            Thread.Sleep(100);
                            if (FlagPropChange)
                            {
                                cap.SetCaptureProperty(CaptureProperty.FrameWidth, WIDTH);
                                cap.SetCaptureProperty(CaptureProperty.FrameHeight, HEIGHT);
                                cap.SetCaptureProperty(CaptureProperty.Brightness, Brightness);
                                cap.SetCaptureProperty(CaptureProperty.Contrast, Contrast);
                                cap.SetCaptureProperty(CaptureProperty.Hue, Hue);
                                cap.SetCaptureProperty(CaptureProperty.Saturation, Saturation);
                                cap.SetCaptureProperty(CaptureProperty.Sharpness, Sharpness);
                                cap.SetCaptureProperty(CaptureProperty.Gamma, Gamma);
                                cap.SetCaptureProperty(CaptureProperty.Gain, Gain);
                                cap.SetCaptureProperty(CaptureProperty.Exposure, Exposure); // exposure
                                //cap.SetCaptureProperty(CaptureProperty.WhiteBalance, White); // not supported in OpenCvSharp 2/3

                                dis.BeginInvoke(new Action(() =>
                                {
                                    try
                                    {
                                        FlagPropChange = false;
                                    }
                                    catch
                                    {
                                        MessageBox.Show("Camera error");
                                        canExecute = false;
                                    }
                                }));
                            }

                            im = Cv.QueryFrame(cap); // grab a frame
                            if (im == null)
                            {
                                continue;
                            }
                            if (IsActive == true)
                            {
                                IsActive = false;
                            }

                            dst = new IplImage(im.Size, im.Depth, im.NChannels);

                            //set rectify data
                            _mapX = Cv.CreateImage(im.Size, BitDepth.F32, 1);
                            _mapY = Cv.CreateImage(im.Size, BitDepth.F32, 1);
                            Cv.InitUndistortMap(_fileIntrinsic, _fileDistortion, _mapX, _mapY);
                            Cv.Remap(im, dst, _mapX, _mapY);
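                            // NOTE: these undistortion maps depend only on the fixed
                            // calibration files, so they could be built once before the
                            // loop instead of being re-created for every frame.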


                            // tilt correction (rotate by Theta)
                            CvPoint2D32f center = new CvPoint2D32f(WIDTH / 2, HEIGHT / 2);
                            CvMat affineMatrix  = Cv.GetRotationMatrix2D(center, Theta, 1.0);
                            //Cv.WarpAffine(im, im, affineMatrix);
                            Cv.WarpAffine(dst, dst, affineMatrix);

                            if (FlagTestPic)
                            {
                                imageForTest = dst.Clone();
                                FlagTestPic  = false;
                            }

                            if (FlagLabeling)
                            {
                                var imageForLabeling = new IplImage(WIDTH, HEIGHT, BitDepth.U8, 3);
                                var imbuff           = dst.Clone();
                                var Binbuff          = Binary(imbuff);
                                blobs = new CvBlobs(Binbuff);

                                blobs.RenderBlobs(dst, imageForLabeling);
                                dis.BeginInvoke(new Action(() =>
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(imageForLabeling, buff); // convert the frame for display
                                    source = buff;
                                    imageForLabeling.Dispose();
                                }));

                                while (FlagNgFrame)
                                {
                                    ;
                                }

                                continue;
                            }



                            // binarized view
                            if (FlagBin)
                            {
                                var imbuff  = dst.Clone();
                                var Binbuff = Binary(imbuff);
                                dis.BeginInvoke(new Action(() =>
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(Binbuff, grayBuff); // convert the frame for display
                                    source = grayBuff;
                                }));
                                continue;
                            }



                            // grid overlay view
                            if (FlagGrid)
                            {
                                foreach (var i in Enumerable.Range(0, HEIGHT / 10))
                                {
                                    var 行  = i * 10;
                                    var p1 = new CvPoint(0, 行);
                                    var p2 = new CvPoint(WIDTH, 行);
                                    dst.Line(p1, p2, CvColor.Aquamarine, 1, LineType.AntiAlias, 0);
                                }
                                foreach (var j in Enumerable.Range(0, WIDTH / 10))
                                {
                                    var 列  = j * 10;
                                    var p1 = new CvPoint(列, 0);
                                    var p2 = new CvPoint(列, HEIGHT);
                                    dst.Line(p1, p2, CvColor.Aquamarine, 1, LineType.AntiAlias, 0);
                                }
                                dis.BeginInvoke(new Action(() =>
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(dst, buff); // convert the frame for display
                                    source = buff;
                                }));
                                continue;
                            }



                            if (FlagFrame)
                            {
                                dis.BeginInvoke(new Action(() =>
                                {
                                    MakeFrame(dst);
                                    WriteableBitmapConverter.ToWriteableBitmap(dst, buff); // convert the frame for display
                                    source = buff;
                                }));
                                continue;
                            }

                            if (FlagNgFrame) // if the test result was NG
                            {
                                dis.BeginInvoke(new Action(() =>
                                {
                                    MakeNgFrame(imageForTest);
                                    WriteableBitmapConverter.ToWriteableBitmap(imageForTest, source); // convert the frame for display
                                }));

                                while (FlagNgFrame)
                                {
                                    ;
                                }
                            }

                            if (FlagHsv)
                            {
                                GetHsv(dst);
                            }

                            // if no flag is set, show the normal view
                            dis.BeginInvoke(new Action(() =>
                            {
                                try
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(dst, buff); // convert the frame for display
                                    source = buff;
                                }
                                catch
                                {
                                    CamState   = false;
                                    canExecute = false;
                                }
                            }));
                        }
                        catch
                        {
                            // swallow the exception and mark the camera as stopped
                            CamState   = false;
                            canExecute = false;
                        }
                    }
                }
                catch
                {
                    CamState = false;
                }
                finally
                {
                    if (cap != null)
                    {
                        cap.Dispose();
                        cap = null;
                    }
                    IsActive = false;
                    Stopped  = true;
                }
            });
        }
Example #11
        private CvBlobs PreProcessImage2_old(IplImage img)
        {
            CvBlobs       blobs     = null;
            IplConvKernel element   = null;
            IplImage      temp      = null;
            IplImage      dest      = null;
            IplImage      tmpImage  = null;
            IplImage      tmpImage2 = null;
            IplImage      labelImg  = null;

            try
            {
                element  = Cv.CreateStructuringElementEx(180, 5, 90, 1, ElementShape.Rect, null);
                tmpImage = new IplImage(img.Size, BitDepth.U8, 1);
                temp     = tmpImage.Clone();
                dest     = tmpImage.Clone();
                img.CvtColor(tmpImage, ColorConversion.RgbaToGray);
                tmpImage.Rectangle(new CvPoint(0, 0), new CvPoint((Int32)(tmpImage.Size.Width), (Int32)((tmpImage.Size.Height / 9) * 3)), new CvScalar(255, 255, 255), -1);
                tmpImage.Rectangle(new CvPoint(0, (Int32)((tmpImage.Size.Height / 5) * 4)), new CvPoint((Int32)(tmpImage.Size.Width), (Int32)(tmpImage.Size.Height)), new CvScalar(255, 255, 255), -1);
                tmpImage.Rectangle(new CvPoint((Int32)((tmpImage.Size.Width / 9) * 7), 0), new CvPoint((Int32)((tmpImage.Size.Width)), (Int32)(tmpImage.Size.Height)), new CvScalar(255, 255, 255), -1);
                Cv.Smooth(tmpImage, tmpImage, SmoothType.Gaussian);
                Cv.MorphologyEx(tmpImage, dest, temp, element, MorphologyOperation.TopHat, 1);
                Cv.Threshold(dest, tmpImage, 128, 255, ThresholdType.Binary | ThresholdType.Otsu);
                Cv.Smooth(tmpImage, dest, SmoothType.Median);


                labelImg  = new IplImage(img.Size, CvBlobLib.DepthLabel, 1);
                blobs     = new CvBlobs();
                tmpImage2 = tmpImage.Clone();
                CvBlobLib.Label(tmpImage2, labelImg, blobs);

                //Cv.ReleaseImage(tmpImage);
                //tmpImage = img.Clone();
                //blobs.RenderBlobs(labelImg, img, tmpImage);
                //tmpImage.SaveImage(@"c:\temp\newImages\RenderBlobsNOFiltered.png");


                CvBlobLib.FilterByArea(blobs, 850, 4850);
                Cv.ReleaseImage(tmpImage);
                tmpImage = img.Clone();
                //CvTracks tracks = new CvTracks();
                //CvBlobLib.UpdateTracks(blobs, tracks, 200.0, 5);
                //CvBlobLib.RenderTracks(tracks, tmpImage, tmpImage, RenderTracksMode.ID);
                blobs.RenderBlobs(labelImg, img, tmpImage, RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle);
                //tmpImage.SaveImage(@"c:\temp\newImages\RenderBlobsFiltered.png");
            }
            finally
            {
                if (null != temp)
                {
                    Cv.ReleaseImage(temp);
                }

                if (null != dest)
                {
                    Cv.ReleaseImage(dest);
                }

                if (null != tmpImage)
                {
                    Cv.ReleaseImage(tmpImage);
                }

                if (null != tmpImage2)
                {
                    Cv.ReleaseImage(tmpImage2);
                }

                if (null != labelImg)
                {
                    Cv.ReleaseImage(labelImg);
                }
            }


            return(blobs);
        }
Example #12
        private CvBlobs PreProcessImage1(IplImage mainSubImage, IplImage imgGray)
        {
            CvBlobs  blobs    = null;
            IplImage tmpImage = null;
            IplImage gray     = null;
            IplImage tgray    = null;
            IplImage labelImg = null;
            IplImage temp     = null;

            try
            {
                tgray = imgGray.Clone();
                gray  = new IplImage(tgray.Size, tgray.Depth, 1);
                Cv.Smooth(tgray, tgray, SmoothType.Gaussian);
                Cv.Canny(tgray, gray, 500, 2, ApertureSize.Size5);
                temp = gray.Clone();
                //IplConvKernel element = Cv.CreateStructuringElementEx(5, 1, 3, 0, ElementShape.Rect, null);
                IplConvKernel element = Cv.CreateStructuringElementEx(7, 1, 3, 0, ElementShape.Rect, null);
                Cv.MorphologyEx(gray, gray, temp, element, MorphologyOperation.BlackHat, 1);
                Cv.Threshold(gray, gray, 100, 255, ThresholdType.Binary | ThresholdType.Otsu);
                Cv.Smooth(gray, gray, SmoothType.Gaussian);


                labelImg = new IplImage(mainSubImage.Size, CvBlobLib.DepthLabel, 1);
                blobs    = new CvBlobs();
                CvBlobLib.Label(gray, labelImg, blobs);
                CvBlobLib.FilterByArea(blobs, 1550, 4850);

                tmpImage = mainSubImage.Clone();
                //CvTracks tracks = new CvTracks();
                //CvBlobLib.UpdateTracks(blobs, tracks, 200.0, 5);
                //CvBlobLib.RenderTracks(tracks, tmpImage, tmpImage, RenderTracksMode.ID);
                blobs.RenderBlobs(labelImg, mainSubImage, tmpImage, RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle);

                /*
                 * img.SetROI(subImageRect);
                 * Cv.Copy(tmpImage, img);
                 * img.ResetROI();
                 * Cv.ReleaseImage(tmpImage);
                 *
                 */
            }
            finally
            {
                if (null != temp)
                {
                    Cv.ReleaseImage(temp);
                }

                if (null != tgray)
                {
                    Cv.ReleaseImage(tgray);
                }

                if (null != gray)
                {
                    Cv.ReleaseImage(gray);
                }

                if (null != labelImg)
                {
                    Cv.ReleaseImage(labelImg);
                }

                if (null != tmpImage)
                {
                    Cv.ReleaseImage(tmpImage);
                }
            }

            return(blobs);
        }
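
Note: both preprocessing helpers end with the same cvblob pipeline. Reduced to its core it looks like the sketch below, where binary is assumed to be a pre-thresholded single-channel image, source the original image, and display a clone of source to draw on:

    IplImage labels = new IplImage(binary.Size, CvBlobLib.DepthLabel, 1);
    CvBlobs  blobs  = new CvBlobs();
    CvBlobLib.Label(binary, labels, blobs);    // connected-component labelling
    CvBlobLib.FilterByArea(blobs, 850, 4850);  // drop blobs outside the expected area range
    blobs.RenderBlobs(labels, source, display, // draw bounding boxes and angles for inspection
                      RenderBlobsMode.BoundingBox | RenderBlobsMode.Angle);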
Example #13
 /// <summary>
 /// Gets the contours of an image
 /// </summary>
 /// <param name="img">Input binary image</param>
 /// <param name="storage">Memory storage for the contour data</param>
 /// <returns>The contour with the most points, or null if none was found</returns>
 private CvSeq<CvPoint> FindContours(IplImage img, CvMemStorage storage)
 {
      // Extract the contours
     CvSeq<CvPoint> contours;
     using (IplImage imgClone = img.Clone())
     {
         Cv.FindContours(imgClone, storage, out contours);
         if (contours == null)
         {
             return null;
         }
         contours = Cv.ApproxPoly(contours, CvContour.SizeOf, storage, ApproxPolyMethod.DP, 3, true);
     }
      // Keep only the contour that looks longest (the one with the most points)
     CvSeq<CvPoint> max = contours;
     for (CvSeq<CvPoint> c = contours; c != null; c = c.HNext)
     {
         if (max.Total < c.Total)
         {
             max = c;
         }
     }
     return max;
 }
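
A minimal caller for FindContours, assuming a binarized 8-bit input image (the file name and the threshold step are illustrative, not from the original):

     // Hypothetical usage: binarize an image, then take the contour with the most points.
     using (IplImage bin = new IplImage("digits.png", LoadMode.GrayScale))
     using (CvMemStorage storage = new CvMemStorage())
     {
         Cv.Threshold(bin, bin, 0, 255, ThresholdType.Otsu);
         CvSeq<CvPoint> longest = FindContours(bin, storage);
         if (longest != null)
             Console.WriteLine("longest contour: " + longest.Total + " points");
     }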
Ejemplo n.º 14
0
        public IplImage HoughLines_Line(IplImage src, int canny1, int canny2, int thresh)
        {
            // cvHoughLines2
            // Detect lines with the standard Hough transform and draw them on a copy of the input
            List <CvPoint> LinePoints = new List <CvPoint>();

            // (1) Load the image
            using (IplImage srcImgStd = src.Clone())
                using (IplImage srcImgGray = new IplImage(src.Size, BitDepth.U8, 1))
                {
                    Cv.CvtColor(srcImgStd, srcImgGray, ColorConversion.BgrToGray);

                    // (2) Canny edge detection as input to the Hough transform
                    //Cv.Canny(srcImgGray, srcImgGray, 50, 200, ApertureSize.Size3);
                    Cv.Canny(srcImgGray, srcImgGray, canny1, canny2, ApertureSize.Size3);

                    houghLine = srcImgGray.Clone();

                    using (CvMemStorage storage = new CvMemStorage())
                    {
                        LinePoints.Clear();
                        // (3) Detect lines with the standard Hough transform and draw them
                        CvSeq lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Standard, 1, Math.PI / 180, thresh, 0, 0);
                        //int limit = Math.Min(lines.Total, 10);
                        for (int i = 0; i < Math.Min(lines.Total, 3); i++)
                        {
                            CvLineSegmentPolar elem = lines.GetSeqElem <CvLineSegmentPolar>(i).Value;
                            float rho   = elem.Rho;
                            float theta = elem.Theta;

                            // (x0, y0) is the foot of the perpendicular from the origin to the line
                            double a = Math.Cos(theta), b = Math.Sin(theta);
                            double x0 = a * rho, y0 = b * rho;

                            CvPoint pt1, pt2;

                            //pt1.X = Cv.Round(x0 + 1000*(-b));
                            //pt1.Y = Cv.Round(y0 + 1000*(a));
                            //pt2.X = Cv.Round(x0 - 1000*(-b));
                            //pt2.Y = Cv.Round(y0 - 1000*(a));

                            // Extend from (x0, y0) along the line direction (-b, a) by the image dimensions
                            pt1.X = Cv.Round(x0 + srcImgStd.Width * (-b));
                            pt1.Y = Cv.Round(y0 + srcImgStd.Height * (a));
                            pt2.X = Cv.Round(x0 - srcImgStd.Width * (-b));
                            pt2.Y = Cv.Round(y0 - srcImgStd.Height * (a));

                            if (pt1.X < 0)
                            {
                                pt1.X = 0;
                                pt2.X = src.Width;
                            }
                            else if (pt2.X < 0)
                            {
                                pt1.X = src.Width;
                                pt2.X = 0;
                            }

                            if (pt1.Y < 0)
                            {
                                pt1.Y = 0;
                                pt2.Y = src.Height;
                            }
                            else if (pt2.Y < 0)
                            {
                                pt1.Y = src.Height;
                                pt2.Y = 0;
                            }

                            //Trace.WriteLine(pt1.X.ToString("000.00000  ") + pt1.Y.ToString("000.00000  ") + pt2.X.ToString("000.00000  ") + pt2.Y.ToString("000.00000"));
                            srcImgStd.Line(pt1, pt2, CvColor.Red, 1, LineType.AntiAlias, 0);

                            LinePoints.Add(pt1);
                            LinePoints.Add(pt2);

                            houghLine = srcImgStd.Clone();
                        }
                    }
                }
            return(houghLine);
        }
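
Each polar line above is converted to endpoints with the identity (x0, y0) = (rho*cos(theta), rho*sin(theta)), the point of the line closest to the origin; the line then runs along the direction (-sin(theta), cos(theta)), and the code extends it by the image dimensions in both directions before clamping. A hedged call sketch (file names and parameter values are placeholders):

            // Hypothetical usage, assuming an instance of the surrounding class:
            IplImage src     = new IplImage("road.png", LoadMode.Color);
            IplImage overlay = HoughLines_Line(src, canny1: 50, canny2: 200, thresh: 80);
            overlay.SaveImage("road_lines.png");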
Ejemplo n.º 15
0
 /// <summary>
 /// Converts the grayscale image (8 bit) into a binary image (8 bit).
 /// mImageGray --> mImageBin
 /// </summary>
 private void convertGrayToBin()
 {
     mImageBin = mImageGray.Clone();
     // With ThresholdType.Otsu the fixed threshold value (0) is ignored and computed automatically
     Cv.Threshold(mImageGray, mImageBin, 0, 255, ThresholdType.Otsu);
 }
Ejemplo n.º 16
0
        //HoughLines2()
        //Detects line segments using the Hough transform.
        //1. CvArr* image : the image to be transformed.
        //2. void* line_storage : storage for the detected lines.
        //3. int method : selects one of the three Hough transform variants.
        //4. double rho / double theta : the resolution of the accumulator (e.g. rho = 1 scans in 1-pixel steps; theta = PI/180 scans in 1-degree steps).
        //5. int threshold : a line is reported where the curves drawn in Hough space overlap more than threshold times; a larger value makes the line criterion stricter.
        //6. double param1 : for the probabilistic method, the minimum line length.
        //7. double param2 : for the probabilistic method, the maximum gap between collinear segments joined into a single line.
        public IplImage HoughLines(IplImage src, IplImage boxImage, ref IplImage resultImage)
        {
            // cvHoughLines2
            // Detect lines with the standard Hough transform
            IplImage orImage = boxImage.Clone();

            // (1) Load the image
            using (IplImage srcImgStd = src.Clone())
                using (IplImage srcImgGray = new IplImage(src.Size, BitDepth.U8, 1))
                {
                    Cv.CvtColor(srcImgStd, srcImgGray, ColorConversion.BgrToGray);

                    // (2) Canny edge detection as input to the Hough transform
                    Cv.Canny(srcImgGray, srcImgGray, 50, 200, ApertureSize.Size3);

                    houghLine = srcImgGray.Clone();
                    // Note: these lengths would feed param1/param2 of the probabilistic method; the standard method below ignores them
                    int lineMinLength = srcImgStd.Width / 2;
                    int lineMaxLength = srcImgStd.Width;

                    using (CvMemStorage storage = new CvMemStorage())
                    {
                        // (3) Detect lines with the standard Hough transform and draw them
                        CvSeq lines = srcImgGray.HoughLines2(storage, HoughLinesMethod.Standard, 1, Math.PI / 180, 50, 0, 0);
                        int   limit = Math.Min(lines.Total, 10);
                        for (int i = 0; i < limit; i++)
                        {
                            CvLineSegmentPolar elem = lines.GetSeqElem <CvLineSegmentPolar>(i).Value;
                            float rho   = elem.Rho;
                            float theta = elem.Theta;

                            double a  = Math.Cos(theta);
                            double b  = Math.Sin(theta);
                            double x0 = a * rho;
                            double y0 = b * rho;

                            CvPoint pt1 = new CvPoint {
                                X = Cv.Round(x0 + src.Width * (-b)), Y = Cv.Round(y0 + src.Height * (a))
                            };
                            CvPoint pt2 = new CvPoint {
                                X = Cv.Round(x0 - src.Width * (-b)), Y = Cv.Round(y0 - src.Height * (a))
                            };

                            if (pt1.X < 1)
                            {
                                pt1.X = 0;
                                pt2.X = src.Width;
                            }

                            if (pt2.X < 1)
                            {
                                pt1.X = src.Width;
                                pt2.X = 0;
                            }

                            if (pt1.Y < 1)
                            {
                                pt1.Y = 0;
                                pt2.Y = src.Height;
                            }

                            if (pt2.Y < 1)
                            {
                                pt1.Y = src.Height;
                                pt2.Y = 0;
                            }

                            srcImgStd.Line(pt1, pt2, CvColor.Red, 1, LineType.AntiAlias, 0);
                            houghLine = srcImgStd.Clone();

                            orImage.Line(pt1, pt2, CvColor.Red, 1, LineType.AntiAlias, 0);
                            houghLine = orImage.Clone();

                            // Only the first (strongest) line is used; return as soon as it is drawn
                            return(houghLine);
                        }
                    }
                }
            return(houghLine);
        }
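
The header comment documents param1/param2 of the probabilistic variant, while the body uses HoughLinesMethod.Standard. A minimal sketch of the probabilistic call under the same OpenCvSharp API (the edge image `edges`, the drawing target `canvas`, and all thresholds are assumptions):

            // Hypothetical sketch: the probabilistic transform returns finite segments
            // directly, so no rho/theta-to-endpoint conversion is needed.
            using (CvMemStorage storage = new CvMemStorage())
            {
                CvSeq segments = edges.HoughLines2(storage, HoughLinesMethod.Probabilistic,
                                                   1, Math.PI / 180, 50,
                                                   edges.Width / 2, // param1: minimum segment length
                                                   10);             // param2: maximum gap joined into one segment
                for (int i = 0; i < segments.Total; i++)
                {
                    CvLineSegmentPoint seg = segments.GetSeqElem<CvLineSegmentPoint>(i).Value;
                    canvas.Line(seg.P1, seg.P2, CvColor.Green, 1, LineType.AntiAlias, 0);
                }
            }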
Ejemplo n.º 17
0
 /// <summary>
 /// Converts the binary image (8 bit) into a 7-segment image (8 bit).
 /// mImageBin --> mImage7Seg, m7SegPattern
 /// </summary>
 /// <param name="threshold">Threshold (0-100)</param>
 private void convertBinTo7Seg(int threshold)
 {
     mImage7Seg = mImageBin.Clone();
     mImage7Seg.Zero();
     m7SegPattern = match7SegMatrix(mImageBin, mImage7Seg, threshold);
 }
Ejemplo n.º 18
0
        private void CaptureCameraCallback()
        {
            const double ScaleFactor  = 2.5;
            const int    MinNeighbors = 1;
            CvSize       MinSize      = new CvSize(30, 30);

            CvCapture cap = CvCapture.FromCamera(1);
            CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("haarcascade_eye.xml");

            while (true)
            {
                IplImage img = cap.QueryFrame();
                //IplImage src = new IplImage(new CvSize(600, 400),BitDepth.U8,1);
                //Cv.Resize(img, src, Interpolation.Linear);
                CvSeq <CvAvgComp> eyes = Cv.HaarDetectObjects(img, cascade, Cv.CreateMemStorage(), ScaleFactor, MinNeighbors, HaarDetectionType.DoCannyPruning, MinSize);

                foreach (CvAvgComp eye in eyes.AsParallel())
                {
                    img.DrawRect(eye.Rect, CvColor.Red);

                    if (eye.Rect.Left > pctCvWindow.Width / 2)
                    {
                        try
                        {
                            using (IplImage rightEyeImg1 = img.Clone())
                            using (IplImage rightEyeImg2 = Cv.CreateImage(eye.Rect.Size, rightEyeImg1.Depth, rightEyeImg1.NChannels))
                            {
                                Cv.SetImageROI(rightEyeImg1, eye.Rect);
                                Cv.Copy(rightEyeImg1, rightEyeImg2, null);
                                Cv.ResetImageROI(rightEyeImg1);

                                Bitmap rightEyeBm = BitmapConverter.ToBitmap(rightEyeImg2);
                                pctRightEye.Image = rightEyeBm;
                            }
                        }
                        catch { /* ignore transient ROI/conversion failures for this frame */ }
                    }
                    else
                    {
                        try
                        {
                            using (IplImage leftEyeImg1 = img.Clone())
                            using (IplImage leftEyeImg2 = Cv.CreateImage(eye.Rect.Size, leftEyeImg1.Depth, leftEyeImg1.NChannels))
                            {
                                Cv.SetImageROI(leftEyeImg1, eye.Rect);
                                Cv.Copy(leftEyeImg1, leftEyeImg2, null);
                                Cv.ResetImageROI(leftEyeImg1);

                                Bitmap leftEyeBm = BitmapConverter.ToBitmap(leftEyeImg2);
                                //pctLeftEye.Image = leftEyeBm;
                                //pctLeftEye.Visible = false;
                            }
                        }
                        catch { /* ignore transient ROI/conversion failures for this frame */ }
                    }
                }

                Bitmap bm = BitmapConverter.ToBitmap(img);
                bm.SetResolution(pctCvWindow.Width, pctCvWindow.Height);
                pctCvWindow.Image = bm;

                img = null;
                bm  = null;
            }
        }
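
CaptureCameraCallback blocks in an endless loop, so it has to run off the UI thread. A minimal startup sketch (the thread setup is an assumption, not shown in the original; requires using System.Threading):

            // Hypothetical startup code, e.g. in the form's Load handler. Note that a
            // worker thread should normally update controls such as pctCvWindow via Invoke.
            var captureThread = new Thread(CaptureCameraCallback) { IsBackground = true };
            captureThread.Start();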
Ejemplo n.º 19
0
 public Plate(IplImage pImage)
 {
     image           = pImage.Clone();
     plateCopy       = new Plate(image, true);
     plateCopy.image = AdaptiveThresholding(plateCopy.image);
 }
Ejemplo n.º 20
0
        /// <summary>
        /// Converts IplImage to WriteableBitmap.
        /// This method is memory-efficient: no new WriteableBitmap is allocated, and the
        /// result is written into the WriteableBitmap passed as an argument.
        /// </summary>
        /// <param name="src">Input IplImage</param>
        /// <param name="dst">Output WriteableBitmap</param>
        public static void ToWriteableBitmap(IplImage src, WriteableBitmap dst)
        {
            if (src == null)
                throw new ArgumentNullException("src");
            if (dst == null)
                throw new ArgumentNullException("dst");
            if (src.Width != dst.PixelWidth || src.Height != dst.PixelHeight)
                throw new ArgumentException("size of src must be equal to size of dst");
            //if (src.Depth != BitDepth.U8)
                //throw new ArgumentException("bit depth of src must be BitDepth.U8", "src");

            int w = src.Width;
            int h = src.Height;
            int bpp = dst.Format.BitsPerPixel;

            int channels = GetOptimumChannels(dst.Format);   
            if (src.NChannels != channels)
            {
                throw new ArgumentException("PixelFormat of dst is invalid", "dst");
            }

            // Flip vertically when the image origin is bottom-left
            IplImage ipl = null;
            if (src.Origin == ImageOrigin.TopLeft)
            {
                ipl = src;
            }
            else
            {
                ipl = src.Clone();
                Cv.Flip(src, ipl, FlipMode.X);
            }

            if (bpp == 1)
            {
                unsafe
                {
                    // Copy the bits over by hand
                    int stride = w / 8 + 1;
                    if (stride < 2)
                    {
                        stride = 2;
                    }
                    byte[] pixels = new byte[h * stride];
                    byte* p = (byte*)(ipl.ImageData.ToPointer());
                    int x = 0;
                    int y;
                    int byte_pos;
                    int offset;
                    byte b;
                    int i;
                    int widthStep = src.WidthStep;
                    for (y = 0; y < h; y++)
                    {
                        offset = y * stride;
                        for (byte_pos = 0; byte_pos < stride; byte_pos++)
                        {
                            if (x < w)
                            {
                                b = 0;
                                // Check the next 8 horizontal pixels and pack them into a single byte, one bit per pixel
                                for (i = 0; i < 8; i++)
                                {
                                    b <<= 1;
                                    if (x < w && p[widthStep * y + x] != 0)
                                    {
                                        b |= 1;
                                    }
                                    x++;
                                }
                                pixels[offset + byte_pos] = b;
                            }
                        }
                        x = 0;
                    }
                    dst.WritePixels(new Int32Rect(0, 0, w, h), pixels, stride, 0);
                }

                /*} else if(bpp == 8){
                    int stride = w;
                    array<Byte>^ pixels = gcnew array<Byte>(h * stride);
                    byte* p = (byte*)(void*)src->ImageData;
                    for (int y=0; y<h; y++) {
                        for(int x=0; x<w; x++){
                            pixels[y * stride + x] = p[src->WidthStep * y + x];
                        }
                    }
                    dst->WritePixels(Int32Rect(0, 0, w, h), pixels, stride, 0);
                */
            }
            else
            {
                dst.WritePixels(new Int32Rect(0, 0, w, h), ipl.ImageData, ipl.ImageSize, ipl.WidthStep);
            }

            if (src.Origin == ImageOrigin.BottomLeft)
            {
                ipl.Dispose();
            }
        }
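
A hedged WPF usage sketch for ToWriteableBitmap (the file name and the Image control are assumptions; an 8-bit 3-channel BGR IplImage pairs with PixelFormats.Bgr24):

            // Hypothetical caller: allocate the WriteableBitmap once with a matching size
            // and pixel format, then reuse it for every frame to avoid per-frame allocations.
            IplImage frame     = new IplImage("frame.png", LoadMode.Color); // 8-bit BGR, 3 channels
            WriteableBitmap wb = new WriteableBitmap(frame.Width, frame.Height,
                                                     96, 96, PixelFormats.Bgr24, null);
            ToWriteableBitmap(frame, wb);
            imageControl.Source = wb; // imageControl: a WPF Image element (assumed)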
Ejemplo n.º 21
0
 public Plate(IplImage pImage, Boolean isCopy)
 {
     image = pImage.Clone();
 }
Ejemplo n.º 22
0
        EyeRects Recognize(PictureBox bb, Panel container)
        {
            const double ScaleFactor  = 2.5;
            const int    MinNeighbors = 1;
            CvSize       MinSize      = new CvSize(30, 30);

            //CvCapture cap = CvCapture.FromCamera(1);
            CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile("haarcascade_eye.xml");
            //IplImage img = cap.QueryFrame();
            IplImage          img  = IplImage.FromBitmap(new Bitmap(bb.Image));
            CvSeq <CvAvgComp> eyes = Cv.HaarDetectObjects(img, cascade, Cv.CreateMemStorage(), ScaleFactor, MinNeighbors, HaarDetectionType.DoCannyPruning, MinSize);

            img.DrawRect(new CvRect(30, 30, bb.Image.Width - 30, bb.Image.Height - 60), CvColor.Yellow);
            var rcs = new EyeRects();

            foreach (CvAvgComp eye in eyes.AsParallel())
            {
                rcs.AddRect(eye.Rect);
                img.DrawRect(eye.Rect, CvColor.Yellow);

                if (eye.Rect.Left > pn.Width / 2)
                {
                    try
                    {
                        using (IplImage rightEyeImg1 = img.Clone())
                        using (IplImage rightEyeImg2 = Cv.CreateImage(eye.Rect.Size, rightEyeImg1.Depth, rightEyeImg1.NChannels))
                        {
                            Cv.SetImageROI(rightEyeImg1, eye.Rect);
                            Cv.Copy(rightEyeImg1, rightEyeImg2, null);
                            Cv.ResetImageROI(rightEyeImg1);

                            //Bitmap rightEyeBm = BitmapConverter.ToBitmap(rightEyeImg2);
                            //spMain.Panel2.Image = rightEyeBm;
                        }
                    }
                    catch { /* ignore transient ROI/conversion failures */ }
                }
                else
                {
                    try
                    {
                        using (IplImage leftEyeImg1 = img.Clone())
                        using (IplImage leftEyeImg2 = Cv.CreateImage(eye.Rect.Size, leftEyeImg1.Depth, leftEyeImg1.NChannels))
                        {
                            Cv.SetImageROI(leftEyeImg1, eye.Rect);
                            Cv.Copy(leftEyeImg1, leftEyeImg2, null);
                            Cv.ResetImageROI(leftEyeImg1);

                            //Bitmap leftEyeBm = BitmapConverter.ToBitmap(leftEyeImg2);
                            //pctLeftEye.Image = leftEyeBm;
                        }
                    }
                    catch { /* ignore transient ROI/conversion failures */ }
                }
            }
            Bitmap bm = BitmapConverter.ToBitmap(img);

            //bm.SetResolution(1500, 1500);
            //pctCvWindow.Image = bm;
            //PictureBox pb = new PictureBox();
            //pb.Image = bm;
            //pb.Image = bm;
            bb.Image = bm;

            //spMain.Panel2.Controls.Clear();
            if (pn.Controls.Count < 1)
            {
                pn.Controls.Add(bb);
                //bb.Click += bb_Click;
            }
            bb.Dock = DockStyle.Fill;
            //pb.Image = bm;
            img = null;
            bm  = null;
            return(rcs);
        }
Ejemplo n.º 23
0
        public uEyeCapture()
        {
            source = Observable.Create <uEyeDataFrame>(observer =>
            {
                var deviceId = DeviceId;
                var camera   = new Camera();
                try
                {
                    var statusRet = deviceId.HasValue ? camera.Init(deviceId.Value | (int)DeviceEnumeration.UseDeviceID) : camera.Init();
                    HandleResult(statusRet);

                    if (!string.IsNullOrEmpty(ConfigFile))
                    {
                        statusRet = camera.Parameter.Load(ConfigFile);
                        HandleResult(statusRet);
                    }

                    statusRet = camera.Memory.Allocate();
                    HandleResult(statusRet);

                    Int32 s32MemID;
                    statusRet = camera.Memory.GetActive(out s32MemID);
                    HandleResult(statusRet);

                    int frameWidth;
                    statusRet = camera.Memory.GetWidth(s32MemID, out frameWidth);
                    HandleResult(statusRet);

                    int frameHeight;
                    statusRet = camera.Memory.GetHeight(s32MemID, out frameHeight);
                    HandleResult(statusRet);

                    int s32Bpp;
                    statusRet = camera.Memory.GetBitsPerPixel(s32MemID, out s32Bpp);
                    HandleResult(statusRet);

                    ColorMode colorMode;
                    statusRet = camera.PixelFormat.Get(out colorMode);
                    HandleResult(statusRet);

                    var frameSize = new OpenCV.Net.Size(frameWidth, frameHeight);
                    var depth     = GetImageDepth(colorMode);
                    var channels  = s32Bpp / (int)depth;
                    statusRet     = camera.Memory.Allocate();
                    HandleResult(statusRet);

                    camera.EventFrame += (sender, e) =>
                    {
                        Int32 activeMemID;
                        camera.Memory.GetActive(out activeMemID);

                        IntPtr imageBuffer;
                        camera.Memory.ToIntPtr(activeMemID, out imageBuffer);

                        ImageInfo imageInfo;
                        camera.Information.GetImageInfo(activeMemID, out imageInfo);

                        using (var output = new IplImage(frameSize, depth, channels, imageBuffer))
                        {
                            observer.OnNext(new uEyeDataFrame(output.Clone(), imageInfo));
                        }
                    };

                    statusRet = camera.Acquisition.Capture();
                    HandleResult(statusRet);
                }
                catch
                {
                    camera.Exit();
                    throw;
                }

                return(() =>
                {
                    camera.Acquisition.Stop();
                    camera.Exit();
                });
            })
                     .PublishReconnectable()
                     .RefCount();
        }
Ejemplo n.º 24
0
 public IplImage Filter(IplImage sImage, ColorConversion code)
 {
     // Clone gives a destination with the same size, depth and channel count, so this
     // only works for conversions that preserve the channel count (e.g. BgrToHsv)
     fImage = sImage.Clone();
     Cv.CvtColor(sImage, fImage, code);
     return(Filter(fImage));
 }
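
For a conversion that changes the channel count, Clone() cannot serve as the destination; a sketch of the gray-scale case (names are placeholders):

     // Hypothetical variant: BgrToGray needs a single-channel destination buffer.
     IplImage src  = new IplImage("frame.png", LoadMode.Color);
     IplImage gray = new IplImage(src.Size, src.Depth, 1);
     Cv.CvtColor(src, gray, ColorConversion.BgrToGray);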
Ejemplo n.º 25
0
        public void Start()
        {
            IsActive = true;
            StopFlag = true;
            var             im       = new IplImage(); // holds the latest camera frame
            WriteableBitmap buff     = new WriteableBitmap(WIDTH, HEIGHT, 96, 96, PixelFormats.Bgr24, null);
            WriteableBitmap grayBuff = new WriteableBitmap(WIDTH, HEIGHT, 96, 96, PixelFormats.Gray8, null);

            Task.Run(() =>
            {
                using (var cap = Cv.CreateCameraCapture(CameraNumber)) // open the camera capture
                {
                    Dispatcher dis = App.Current.Dispatcher;

                    // After startup, rewrite the white balance twice; otherwise the camera's auto-WB flag is not cleared
                    Wb = 3000;
                    Thread.Sleep(100);
                    Wb = 3100;
                    Thread.Sleep(100);
                    Wb = State.camProp.Whitebalance;

                    while (StopFlag)             // keep showing camera frames while StopFlag is set
                    {
                        try
                        {
                            Thread.Sleep(100);
                            if (FlagPropChange)
                            {
                                cap.SetCaptureProperty(CaptureProperty.FrameWidth, WIDTH);
                                cap.SetCaptureProperty(CaptureProperty.FrameHeight, HEIGHT);
                                cap.SetCaptureProperty(CaptureProperty.Brightness, Brightness);
                                cap.SetCaptureProperty(CaptureProperty.Contrast, Contrast);
                                cap.SetCaptureProperty(CaptureProperty.Hue, Hue);
                                cap.SetCaptureProperty(CaptureProperty.Saturation, Saturation);
                                cap.SetCaptureProperty(CaptureProperty.Sharpness, Sharpness);
                                cap.SetCaptureProperty(CaptureProperty.Gamma, Gamma);
                                cap.SetCaptureProperty(CaptureProperty.Gain, Gain);
                                cap.SetCaptureProperty(CaptureProperty.Exposure, Exposure); // exposure
                                dis.BeginInvoke(new Action(() => { FlagPropChange = false; }));
                            }

                            im = Cv.QueryFrame(cap); // grab a frame
                            if (im == null)
                            {
                                continue;
                            }
                            if (IsActive == true)
                            {
                                IsActive = false;
                            }

                            // Tilt correction: rotate the frame around its center by Theta
                            CvPoint2D32f center = new CvPoint2D32f(WIDTH / 2, HEIGHT / 2);
                            CvMat affineMatrix  = Cv.GetRotationMatrix2D(center, Theta, 1.0);
                            Cv.WarpAffine(im, im, affineMatrix);

                            // Binarized view
                            if (FlagBin)
                            {
                                var imbuff  = im.Clone();
                                var Binbuff = Binary(imbuff);

                                dis.BeginInvoke(new Action(() =>
                                {
                                    MakeFrame(Binbuff);
                                    WriteableBitmapConverter.ToWriteableBitmap(Binbuff, grayBuff); // copy the binarized frame into the WriteableBitmap
                                    source = grayBuff;
                                    imbuff.Dispose();
                                    Binbuff.Dispose();
                                }));
                                continue;
                            }

                            // Grid overlay
                            if (FlagGrid)
                            {
                                foreach (var i in Enumerable.Range(0, HEIGHT / 10))
                                {
                                    var 行  = i * 10;
                                    var p1 = new CvPoint(0, 行);
                                    var p2 = new CvPoint(WIDTH, 行);
                                    im.Line(p1, p2, CvColor.Aquamarine, 1, LineType.AntiAlias, 0);
                                }
                                foreach (var j in Enumerable.Range(0, WIDTH / 10))
                                {
                                    var 列  = j * 10;
                                    var p1 = new CvPoint(列, 0);
                                    var p2 = new CvPoint(列, HEIGHT);
                                    im.Line(p1, p2, CvColor.Aquamarine, 1, LineType.AntiAlias, 0);
                                }
                                dis.BeginInvoke(new Action(() =>
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(im, buff); // copy the frame into the WriteableBitmap
                                    source = buff;
                                }));
                                continue;
                            }

                            // Crosshair overlay centered on (CrossX, CrossY)
                            if (FlagCross)
                            {
                                int Rad = 20;
                                var p0  = new CvPoint(CrossX, CrossY);
                                var pR  = new CvPoint(CrossX + Rad, CrossY);
                                var pL  = new CvPoint(CrossX - Rad, CrossY);
                                var pO  = new CvPoint(CrossX, CrossY - Rad);
                                var pU  = new CvPoint(CrossX, CrossY + Rad);
                                im.Line(p0, pR, CvColor.Red, 1, LineType.AntiAlias, 0);
                                im.Line(p0, pL, CvColor.Red, 1, LineType.AntiAlias, 0);
                                im.Line(p0, pO, CvColor.Red, 1, LineType.AntiAlias, 0);
                                im.Line(p0, pU, CvColor.Red, 1, LineType.AntiAlias, 0);
                                dis.BeginInvoke(new Action(() =>
                                {
                                    WriteableBitmapConverter.ToWriteableBitmap(im, buff); // copy the frame into the WriteableBitmap
                                    source = buff;
                                }));
                                continue;
                            }

                            if (FlagFrame)
                            {
                                dis.BeginInvoke(new Action(() =>
                                {
                                    MakeFrame(im);
                                    WriteableBitmapConverter.ToWriteableBitmap(im, buff); // copy the framed image into the WriteableBitmap
                                    source = buff;
                                }));
                                continue;
                            }

                            if (FlagNgFrame) // when the test result is NG (fail)
                            {
                                dis.BeginInvoke(new Action(() =>
                                {
                                    MakeNgFrame(imageForTest);
                                    WriteableBitmapConverter.ToWriteableBitmap(imageForTest, source); // show the NG frame
                                }));

                                // Busy-wait until the NG frame is dismissed elsewhere
                                while (FlagNgFrame)
                                {
                                    ;
                                }
                            }


                            if (FlagHsv)
                            {
                                GetHsv(im);
                            }

                            if (FlagTestPic)
                            {
                                imageForTest = im.Clone();
                                FlagTestPic  = false;
                            }

                            // If no display flag is set, show the normal view
                            dis.BeginInvoke(new Action(() =>
                            {
                                WriteableBitmapConverter.ToWriteableBitmap(im, buff); // copy the frame into the WriteableBitmap
                                source = buff;
                            }));
                        }
                        catch
                        {
                            // The camera occasionally fails mid-frame; swallow the exception and stop the loop
                            StopFlag = false;
                            MessageBox.Show("aaaa");
                        }
                    }
                }
            });
        }
Ejemplo n.º 26
0
        /// <summary>
        /// Converts IplImage to WriteableBitmap.
        /// This method is memory-efficient: no new WriteableBitmap is allocated, and the
        /// result is written into the WriteableBitmap passed as an argument.
        /// </summary>
        /// <param name="src">Input IplImage</param>
        /// <param name="dst">Output WriteableBitmap</param>
        public static void ToWriteableBitmap(IplImage src, WriteableBitmap dst)
        {
            if (src == null)
            {
                throw new ArgumentNullException("src");
            }
            if (dst == null)
            {
                throw new ArgumentNullException("dst");
            }
            if (src.Width != dst.PixelWidth || src.Height != dst.PixelHeight)
            {
                throw new ArgumentException("size of src must be equal to size of dst");
            }
            //if (src.Depth != BitDepth.U8)
            //throw new ArgumentException("bit depth of src must be BitDepth.U8", "src");

            int w   = src.Width;
            int h   = src.Height;
            int bpp = dst.Format.BitsPerPixel;

            int channels = GetOptimumChannels(dst.Format);

            if (src.NChannels != channels)
            {
                throw new ArgumentException("PixelFormat of dst is invalid", "dst");
            }

            // Flip vertically when the image origin is bottom-left
            IplImage ipl = null;

            if (src.Origin == ImageOrigin.TopLeft)
            {
                ipl = src;
            }
            else
            {
                ipl = src.Clone();
                Cv.Flip(src, ipl, FlipMode.X);
            }

            if (bpp == 1)
            {
                unsafe
                {
                    // Copy the bits over by hand
                    int stride = w / 8 + 1;
                    if (stride < 2)
                    {
                        stride = 2;
                    }
                    byte[] pixels = new byte[h * stride];
                    byte * p      = (byte *)(ipl.ImageData.ToPointer());
                    int    x      = 0;
                    int    y;
                    int    byte_pos;
                    int    offset;
                    byte   b;
                    int    i;
                    int    widthStep = src.WidthStep;
                    for (y = 0; y < h; y++)
                    {
                        offset = y * stride;
                        for (byte_pos = 0; byte_pos < stride; byte_pos++)
                        {
                            if (x < w)
                            {
                                b = 0;
                                // Check the next 8 horizontal pixels and pack them into a single byte, one bit per pixel
                                for (i = 0; i < 8; i++)
                                {
                                    b <<= 1;
                                    if (x < w && p[widthStep * y + x] != 0)
                                    {
                                        b |= 1;
                                    }
                                    x++;
                                }
                                pixels[offset + byte_pos] = b;
                            }
                        }
                        x = 0;
                    }
                    dst.WritePixels(new Int32Rect(0, 0, w, h), pixels, stride, 0);
                }

                /*} else if(bpp == 8){
                 *  int stride = w;
                 *  array<Byte>^ pixels = gcnew array<Byte>(h * stride);
                 *  byte* p = (byte*)(void*)src->ImageData;
                 *  for (int y=0; y<h; y++) {
                 *      for(int x=0; x<w; x++){
                 *          pixels[y * stride + x] = p[src->WidthStep * y + x];
                 *      }
                 *  }
                 *  dst->WritePixels(Int32Rect(0, 0, w, h), pixels, stride, 0);
                 */
            }
            else
            {
                dst.WritePixels(new Int32Rect(0, 0, w, h), ipl.ImageData, ipl.ImageSize, ipl.WidthStep);
            }

            if (src.Origin == ImageOrigin.BottomLeft)
            {
                ipl.Dispose();
            }
        }
Ejemplo n.º 27
0
 public IplImage PreProcess(IplImage image)
 {
     // Defensive copy: the caller gets its own buffer and cannot mutate the input
     return(image.Clone());
 }