Esempio n. 1
0
        /// <summary>
        /// Main automation loop: positions the target window, then for each
        /// template image grabs a greyscale screenshot, runs normalized
        /// cross-correlation template matching, and clicks any match whose
        /// score reaches 0.75.
        /// </summary>
        public static void MainLoop()
        {
            // Pin the target window to a known position/size so match
            // coordinates map directly onto screen coordinates.
            PROCESS.SetWindowPos(0, 0, 800, 600);
            Thread.Sleep(200);
            foreach (var image in ImageList)
            {
                CvMat screen = Utils.TakeScreenshot().ToMat().ToCvMat();
                Screenshot = new CvMat(screen.Rows, screen.Cols, MatrixType.U8C1);
                screen.CvtColor(Screenshot, ColorConversion.BgraToGray);
                // FIX: the BGRA capture is no longer needed once converted;
                // it was previously leaked on every iteration.
                screen.Dispose();

                // Result holds one correlation score per candidate position.
                Result =
                    Cv.CreateImage(Cv.Size(Screenshot.Width - image.Width + 1, Screenshot.Height - image.Height + 1),
                                   BitDepth.F32, 1);
                Cv.MatchTemplate(Screenshot, image, Result, MatchTemplateMethod.CCoeffNormed);

                Cv.Normalize(Result, Result, 0, 1, NormType.MinMax);
                Cv.MinMaxLoc(Result, out MinAcc, out MaxAcc, out MinPos, out MaxPos, null);
                Console.WriteLine(MaxAcc);
                if (MaxAcc >= 0.75)
                {
                    // NOTE(review): this clicks the match's top-left corner;
                    // StartDrippin elsewhere clicks the template centre — confirm
                    // which behaviour is intended here.
                    Position = new Point(MaxPos.X, MaxPos.Y);
                    Utils.MoveMouse(Position);
                    Thread.Sleep(15);
                    Utils.LeftClick();
                    Thread.Sleep(100);
                    MaxAcc = 0;
                }
                Result.Dispose();
            }
        }
Esempio n. 2
0
        /// <summary>
        /// Rearranges the quadrants of a DFT result so that the origin (DC
        /// component) ends up at the centre of the image.
        /// srcArr and dstArr must have the same size and type.
        /// </summary>
        /// <param name="srcArr">Source array.</param>
        /// <param name="dstArr">Destination array (may be the same object as srcArr for in-place mode).</param>
        private static void ShiftDFT(CvArr srcArr, CvArr dstArr)
        {
            CvSize size    = Cv.GetSize(srcArr);
            CvSize dstSize = Cv.GetSize(dstArr);

            if (dstSize.Width != size.Width || dstSize.Height != size.Height)
            {
                throw new ApplicationException("Source and Destination arrays must have equal sizes");
            }
            // Temporary quadrant-sized buffer, needed only for in-place mode.
            CvMat tmp = null;

            if (srcArr == dstArr)
            {
                tmp = Cv.CreateMat(size.Height / 2, size.Width / 2, Cv.GetElemType(srcArr));
            }
            int cx = size.Width / 2;   /* image centre */
            int cy = size.Height / 2;

            // Views of the four source quadrants and their destination counterparts.
            CvMat q1stub, q2stub;
            CvMat q3stub, q4stub;
            CvMat d1stub, d2stub;
            CvMat d3stub, d4stub;
            CvMat q1 = Cv.GetSubRect(srcArr, out q1stub, new CvRect(0, 0, cx, cy));
            CvMat q2 = Cv.GetSubRect(srcArr, out q2stub, new CvRect(cx, 0, cx, cy));
            CvMat q3 = Cv.GetSubRect(srcArr, out q3stub, new CvRect(cx, cy, cx, cy));
            CvMat q4 = Cv.GetSubRect(srcArr, out q4stub, new CvRect(0, cy, cx, cy));
            // FIX: destination views must be taken from dstArr, not srcArr.
            // Previously all eight sub-rects referenced srcArr, so in copy mode
            // the swap below corrupted the source and never wrote the destination.
            CvMat d1 = Cv.GetSubRect(dstArr, out d1stub, new CvRect(0, 0, cx, cy));
            CvMat d2 = Cv.GetSubRect(dstArr, out d2stub, new CvRect(cx, 0, cx, cy));
            CvMat d3 = Cv.GetSubRect(dstArr, out d3stub, new CvRect(cx, cy, cx, cy));
            CvMat d4 = Cv.GetSubRect(dstArr, out d4stub, new CvRect(0, cy, cx, cy));

            // Perform the actual quadrant swap: 1<->3 and 2<->4.
            if (srcArr != dstArr)
            {
                if (!Cv.ARE_TYPES_EQ(q1, d1))
                {
                    throw new ApplicationException("Source and Destination arrays must have the same format");
                }
                Cv.Copy(q3, d1, null);
                Cv.Copy(q4, d2, null);
                Cv.Copy(q1, d3, null);
                Cv.Copy(q2, d4, null);
            }
            else
            {      /* in-place mode: swap through the temporary buffer */
                Cv.Copy(q3, tmp, null);
                Cv.Copy(q1, q3, null);
                Cv.Copy(tmp, q1, null);
                Cv.Copy(q4, tmp, null);
                Cv.Copy(q2, q4, null);
                Cv.Copy(tmp, q2, null);
            }
            if (tmp != null)
            {
                tmp.Dispose();
            }
        }
Esempio n. 3
0
 /// <summary>
 /// Displays an image in a newly created HighGUI window.
 /// </summary>
 /// <param name="imageToShow">The image to display.</param>
 /// <param name="windowName">Optional window title; an auto-incremented number is used when null.</param>
 static public void NewWindowShow(CvArr imageToShow, string windowName = null)
 {
     // Fall back to a sequential numeric title when the caller supplies none.
     windowName = windowName ?? (WindowCounter++).ToString();
     Cv.NamedWindow(windowName);
     Cv.ShowImage(windowName, imageToShow);
 }
Esempio n. 4
0
        /// <summary>
        /// Applies gamma correction to src, writing the result into dst.
        /// </summary>
        /// <param name="src">Input array.</param>
        /// <param name="dst">Output array.</param>
        /// <param name="gamma">Gamma value; 1.0 leaves intensities unchanged.</param>
        public void CorrectGamma(CvArr src, CvArr dst, double gamma)
        {
            // Precompute the 256-entry transfer curve: out = 255 * (in / 255) ^ (1 / gamma).
            double exponent = 1.0 / gamma;
            byte[] table = new byte[256];
            for (int level = 0; level < table.Length; level++)
            {
                table[level] = (byte)(Math.Pow(level / 255.0, exponent) * 255.0);
            }

            Cv.LUT(src, dst, table);
        }
Esempio n. 5
0
        /// <summary>
        /// Background click-bot loop: while bActive, repeatedly screenshots the
        /// screen (4 passes per template image), template-matches each image,
        /// and clicks the centre of any match scoring >= 0.75. Sleeps 2 s
        /// between full sweeps.
        /// </summary>
        private void StartDrippin()
        {
            while (bActive)
            {
                //Utils.SetForegroundWindow(PROCESS.MainWindowHandle);
                foreach (var Image in ImageList)
                {
                    for (int i = 0; i < 4; i++)
                    {
                        if (!bActive)
                        {
                            // Deactivated mid-sweep: stop scanning this image.
                            break;
                        }
                        CvMat Window = Utils.TakeScreenshot().ToMat().ToCvMat();
                        Screenshot = new CvMat(Window.Rows, Window.Cols, MatrixType.U8C1);
                        Window.CvtColor(Screenshot, ColorConversion.BgraToGray);
                        // FIX: the BGRA capture is no longer needed once converted;
                        // it was previously leaked on every pass.
                        Window.Dispose();

                        Result = Cv.CreateImage(Cv.Size(Screenshot.Width - Image.Width + 1, Screenshot.Height - Image.Height + 1),
                                                BitDepth.F32, 1);
                        Cv.MatchTemplate(Screenshot, Image, Result, MatchTemplateMethod.CCoeffNormed);
                        //Cv.Normalize(Result, Result, 0, 1, NormType.MinMax);
                        Cv.MinMaxLoc(Result, out MinAcc, out MaxAcc, out MinPos, out MaxPos, null);

                        if (MaxAcc >= 0.75)
                        {
                            // Click the centre of the matched template region.
                            Position = new Point(MaxPos.X + (Image.Width / 2), MaxPos.Y + (Image.Height / 2));
                            Utils.MoveMouse(Position);
                            Thread.Sleep(15);
                            Utils.LeftClick();
                            Thread.Sleep(100);
                            MaxAcc = 0;
                        }
                        // FIX: dispose every Result. The original disposed only the
                        // last of the four per image (leaking the other three) and
                        // could throw NullReferenceException when bActive went false
                        // before the first Result was created.
                        Result.Dispose();
                    }
                }
                Thread.Sleep(2000);
            }
        }
Esempio n. 6
0
        // NOTE : Also seems not well written and craves optimization at places. P.A.N.A.R.G.O.
        // => frame = 8 bit greyscale CvMat
        /// <summary>
        /// In-place histogram-based contrast enhancement of an 8-bit greyscale CvMat.
        /// Builds the frame's 256-bin histogram, derives an upper threshold from the
        /// mean of local histogram peaks and a lower threshold from image size,
        /// clips the histogram between the two, turns the clipped histogram into a
        /// cumulative lookup table, and remaps every pixel through that table.
        /// Returns without modifying the frame when no histogram peaks are found.
        /// </summary>
        static public void ContrastEnhancement(CvMat frame)
        {
            //CvMat originalFrame = frame; // return this if cannot enhance
            //if (frame.ElemType != MatrixType.U8C1)
            //	frame = MatOps.Convert(frame, MatrixType.U8C1, 1 / 255.0 );

            ///// original histogram of the input frame
            const int HistBinSize = 256;

            int[] histSizes = new int[1];
            histSizes[0] = HistBinSize;
            CvHistogram hist = new CvHistogram(histSizes, HistogramFormat.Array);

            Cv.CalcArrHist(frame, hist, false);               // size = 256 implied

            // Work on a copy so the original histogram stays intact for reconstruction.
            CvHistogram newHist    = MatOps.CopyHistogram(hist);
            CvArr       newHistBin = newHist.Bins;

            //double[] origVals = new double[hist.Bins.GetDims( 0 )];
            // Collect the counts of all non-empty bins (the occupied grey levels).
            List <double> origVals = new List <double>(HistBinSize);

            for (int i = 0; i < HistBinSize; i++)
            {
                double elem = newHistBin.GetReal1D(i);
                if (elem != 0)
                {
                    origVals.Add(elem);
                }
            }

            // FIX : See no need for histL, since we have origVals
            ////// histogram with only nonzero bins
            //CvMat histL = new CvMat( imageRows, imageCols, MatrixType.F32C1, new CvScalar( 0 ) );
            //for (i = 0; i < origVals.size(); i++)
            //	histL.at<float>( i, 0 ) = origVals.at( i );

            List <double> peakValues = new List <double>(HistBinSize);             //std::vector<int> peakValues;

            ////// 3-bin search window: a bin is a peak if it exceeds both neighbours.
            // NOTE(review): the bound "Count - 2" excludes index Count-2 as a peak
            // candidate even though its neighbours exist; a 3-bin window would
            // normally run to Count - 1 — confirm this off-by-one is intentional.
            for (int i = 1; i < origVals.Count - 2; ++i)
            {
                double elem = origVals[i];
                if (elem > origVals[i - 1] && elem > origVals[i + 1])
                {
                    peakValues.Add(elem);
                }
            }

            if (peakValues.Count == 0)
            {
                //Console.Out.WriteLine( "Cannot enhance" );
                return;                 // no peaks => cannot enhance
            }

            ////// Upper threshold: mean of all peak bin counts.
            double threshUP = 0;

            for (int i = 0; i < peakValues.Count; ++i)
            {
                threshUP += peakValues[i];
            }
            threshUP /= peakValues.Count;

            ////// Lower threshold, scaled down by the bin count.
            double threshDOWN = Math.Min((frame.Cols * frame.Rows), threshUP * origVals.Count) / 256.0;
            //Console.Out.WriteLine( "Enhance thresholds " + threshUP + "/" + threshDOWN );

            ////// Histogram reconstruction: clip each bin into [threshDOWN, threshUP],
            ////// leaving empty bins empty and in-range bins untouched.
            CvArr histBins = hist.Bins;

            for (int i = 0; i < HistBinSize; ++i)
            {
                double histElem = histBins.GetReal1D(i);
                if (histElem > threshUP)
                {
                    histBins.SetReal1D(i, threshUP);
                }
                else if (histElem <= threshUP && histElem >= threshDOWN)
                {
                    continue;
                }
                else if (histElem < threshDOWN && histElem > 0)
                {
                    histBins.SetReal1D(i, threshDOWN);
                }
                else if (histElem == 0)
                {
                    continue;
                }
            }
            // Cumulative (accumulated) histogram of the clipped bins.
            double[] accVals = new double[HistBinSize];             //std::vector<int> accVals;
            accVals[0] = (histBins.GetReal1D(0));
            for (int i = 1; i < HistBinSize; ++i)
            {
                // NOTE(review): uses the indexer here but GetReal1D everywhere else —
                // presumably equivalent accessors; confirm.
                accVals[i] = (accVals[i - 1] + histBins[i]);
            }

            // Normalise the cumulative histogram into a 0..255 lookup table
            // (accVals[255] is the total count, i.e. the last accumulated value).
            byte[] lookUpTable = new byte[HistBinSize];             //cv::Mat lookUpTable = cv::Mat::zeros( hist.size(), CV_8UC1 );
            for (int i = 0; i < HistBinSize; ++i)
            {
                lookUpTable[i] = (byte)(255.0 * accVals[i] / accVals[255]);
            }

            // Remap every pixel of the input frame through the lookup table (in place).
            //Console.Out.Write( "Enhance-->" );
            for (int i = 0; i < frame.Cols; ++i)
            {
                for (int j = 0; j < frame.Rows; ++j)
                {
                    // there is NO mask, thus no need to check for; was: "if (mask.data)..."
                    // NOTE(review): the (byte) cast presumably extracts channel 0 of the
                    // CvScalar returned by Get2D — confirm the conversion semantics.
                    byte oldValue = (byte)frame.Get2D(j, i);
                    byte newValue = lookUpTable[oldValue];
                    //if ((newValue <1 || newValue > 254) && (newValue != oldValue)) Console.Out.Write( oldValue + " " + newValue + "|");
                    frame.Set2D(j, i, newValue);
                    //frame.SetReal2D( j, i, lookUpTable[ (int)(255.0 * frame.GetReal2D( j, i )) ] / 255.0);
                }
            }
            //Console.Out.WriteLine();

            //frame = MatOps.Convert( frame, MatrixType.U8C1, 255.0 );
        }
Esempio n. 7
0
        /// <summary>
        /// Camera capture loop: shows the live (mirrored) feed in CaptureBox,
        /// detects faces with a Haar cascade, and for each detection shows the
        /// cropped face in GhostBox, flashes the alert timer, and plays the
        /// alarm sound. Runs until the thread is torn down; exceptions are
        /// logged to the console.
        /// </summary>
        private void CaptureMotion()
        {
            try
            {
                float wFactor = (float)this.Width / (float)CaptureBox.Width;
                float hFactor = (float)this.Height / (float)CaptureBox.Height;

                CvArr     array = null;
                CvCapture cap   = CvCapture.FromCamera(CaptureDevice.Any);

                // Reveal the UI once the camera is opened (on the UI thread).
                this.Invoke(new Action(() =>
                {
                    lblLoading.Visible     = false;
                    GBBox.Visible          = true;
                    CamBox.Visible         = true;
                    RadarBox.Visible       = true;
                    lblGhost.Visible       = true;
                    GhostBox.Visible       = true;
                    lblDescription.Visible = true;
                    CaptureBox.Visible     = true;
                }));

                // NOTE(review): plays whatever clip SoundTrack currently holds,
                // then swaps in the alarm clip used for detections — confirm this
                // startup order is intended.
                SoundTrack.Play();
                SoundTrack = new SoundPlayer("Alarm.wav");

                Thread.Sleep(3600);

                // FIX: load the Haar cascade once. It was previously re-read
                // from disk on every captured frame.
                string filepath = "haarcascade_frontalface_alt2.xml";
                CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(filepath);

                while (true)
                {
                    IplImage img = cap.QueryFrame();

                    if (img == null)
                    {
                        continue;
                    }

                    // Mirror the frame horizontally (null destination: presumably
                    // an in-place flip — confirm against the OpenCvSharp docs).
                    img.Flip(array, FlipMode.Y);

                    CvSeq <CvAvgComp>       faces   = Cv.HaarDetectObjects(img, cascade, Cv.CreateMemStorage(), 3.0, 1,
                                                                           HaarDetectionType.Zero, new CvSize(70, 70),
                                                                           new CvSize(500, 500));

                    Bitmap Image = BitmapConverter.ToBitmap(img);
                    Image.SetResolution(240, 180);

                    this.Invoke(new Action(() =>
                    {
                        CaptureBox.Image = Image;
                    }));

                    foreach (CvAvgComp face in faces)
                    {
                        // Crop the detected face region into its own image.
                        IplImage ClonedImage = img.Clone();
                        Cv.SetImageROI(ClonedImage, face.Rect);

                        IplImage ThisFace = Cv.CreateImage(face.Rect.Size, ClonedImage.Depth, ClonedImage.NChannels);
                        Cv.Copy(ClonedImage, ThisFace, null);
                        Cv.ResetImageROI(ClonedImage);

                        Bitmap GhostImage = BitmapConverter.ToBitmap(ThisFace);
                        GhostImage.SetResolution(114, 103);

                        // FIX: release the per-detection native images; they were
                        // previously leaked on every detection.
                        ThisFace.Dispose();
                        ClonedImage.Dispose();

                        img.DrawRect(face.Rect, CvColor.Red, 3);

                        Bitmap FaceImage = BitmapConverter.ToBitmap(img);
                        FaceImage.SetResolution(240, 180);

                        this.Invoke(new Action(() =>
                        {
                            AlertTimer.Enabled = true;

                            CaptureBox.Image = FaceImage;
                            GhostBox.Image   = GhostImage;
                        }));

                        SoundTrack.Play();

                        Thread.Sleep(2500);

                        this.Invoke(new Action(() =>
                        {
                            AlertTimer.Enabled    = false;
                            lblGhostAlert.Visible = false;
                        }));

                        SoundTrack.Stop();

                        // Only the first detected face per frame is processed.
                        break;
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }
        }
Esempio n. 8
0
        /// <summary>
        /// 
        /// </summary>
        /// <param name="coiimg"></param>
        /// <param name="arr"></param>
        /// <param name="coi">[既定値は-1]</param>
#else
        /// <summary>
        /// 
        /// </summary>
        /// <param name="coiimg"></param>
        /// <param name="arr"></param>
        /// <param name="coi">[By default this is -1]</param>
#endif
        public static void InsertImageCOI(Mat coiimg, CvArr arr, int coi = -1)
        {
            // Reject null wrappers up front so the native layer never receives
            // an invalid pointer.
            if (coiimg == null)
            {
                throw new ArgumentNullException("coiimg");
            }
            if (arr == null)
            {
                throw new ArgumentNullException("arr");
            }
            CppInvoke.cv_insertImageCOI(coiimg.CvPtr, arr.CvPtr, coi);
        }
Esempio n. 9
0
        /// <summary>
        /// 選択されたチャンネルの画像を取り出す
        /// </summary>
        /// <param name="arr">入力配列. CvMat または IplImage の参照.</param>
        /// <param name="coiimg">出力行列. 1チャンネルで, 入力配列srcと同じサイズ・ビット深度を持つ.</param>
        /// <param name="coi">0以上の場合, 指定されたチャンネルについて取り出される。
        /// 0未満の場合, 入力配列srcがIplImageでCOIが指定されていれば, そのCOIについて取り出される. [既定値は-1]</param>
#else
        /// <summary>
        /// Extract the selected image channel
        /// </summary>
        /// <param name="arr">The source array. It should be a pointer to CvMat or IplImage</param>
        /// <param name="coiimg">The destination array; will have single-channel, and the same size and the same depth as src</param>
        /// <param name="coi">If the parameter is &gt;=0, it specifies the channel to extract; 
        /// If it is &lt;0, src must be a pointer to IplImage with valid COI set - then the selected COI is extracted. [By default this is -1]</param>
#endif
        public static void ExtractImageCOI(CvArr arr, Mat coiimg, int coi = -1)
        {
            // Validate the managed wrappers before handing their native
            // pointers to the C++ layer.
            if (arr == null)
            {
                throw new ArgumentNullException("arr");
            }
            if (coiimg == null)
            {
                throw new ArgumentNullException("coiimg");
            }
            CppInvoke.cv_extractImageCOI(arr.CvPtr, coiimg.CvPtr, coi);
        }
Esempio n. 10
0
        /// <summary>
        /// CvMat・IplImage・CvMatND を cv::Matに変換する
        /// </summary>
        /// <param name="arr">入力配列の CvMat, IplImage, CvMatND</param>
        /// <param name="copyData">
        /// false(既定値)の場合、データはコピーされず、ヘッダのみが新たに作られる。
        /// trueの場合、データはすべてコピーされ、変換後はユーザは元の行列を解放しても構わない。[既定値はfalse]
        /// </param>
        /// <param name="allowND">
        /// true(既定値)の場合、入力のCvMatNDは可能ならばMatに変換される。
        /// もし不可能な場合、またはこのフラグがfalseであれば、このメソッドはエラーを通知する。[既定値はtrue]
        /// </param>
        /// <param name="coiMode">
        /// IplImageのCOIの取り扱いを指定するパラメータ。
        /// falseの場合、COIが設定されていたらエラーを通知する。
        /// trueの場合、エラーは通知せず、代わりに入力画像全体を表すヘッダを返し、ユーザにCOIの扱いを決めさせる。[既定値はfalse]
        /// </param>
        /// <returns></returns>
#else
        /// <summary>
        /// Converts CvMat, IplImage or CvMatND to Mat.
        /// </summary>
        /// <param name="arr">The source CvMat, IplImage or CvMatND</param>
        /// <param name="copyData">
        /// When it is false (default value), no data is copied, only the new header is created. 
        /// The the parameter is true, all the data is copied, then user may deallocate the original array right after the conversion. [By default this is false]
        /// </param>
        /// <param name="allowND">
        /// When it is true (default value), then CvMatND is converted to Mat if it’s possible 
        /// (e.g. then the data is contiguous). If it’s not possible, or when the parameter is false, the function will report an error. [By default this is true]
        /// </param>
        /// <param name="coiMode">
        /// The parameter specifies how the IplImage COI (when set) is handled.
        /// * If coiMode=false, the function will report an error if COI is set.
        /// * If coiMode=true, the function will never report an error; instead it returns the header to the whole original image 
        /// and user will have to check and process COI manually, see extractImageCOI. [By default this is false]
        /// </param>
        /// <returns></returns>
#endif
        public static Mat CvarrToMat(CvArr arr, bool copyData = false, bool allowND = true, bool coiMode = false)
        {
            if (arr == null)
            {
                throw new ArgumentNullException("arr");
            }
            // The native call populates the header of a freshly created managed Mat.
            Mat result = new Mat();
            int coiModeFlag = coiMode ? 1 : 0;
            CppInvoke.cv_cvarrToMat(arr.CvPtr, copyData, allowND, coiModeFlag, result.CvPtr);
            return result;
        }
Esempio n. 11
0
        /// <summary>
        /// 
        /// </summary>
        /// <param name="coiimg"></param>
        /// <param name="arr"></param>
        /// <param name="coi">[既定値は-1]</param>
#else
        /// <summary>
        /// 
        /// </summary>
        /// <param name="coiimg"></param>
        /// <param name="arr"></param>
        /// <param name="coi">[By default this is -1]</param>
#endif
        public static void InsertImageCOI(InputArray coiimg, CvArr arr, int coi = -1)
        {
            // Validate the managed wrappers and their native state before the
            // interop call. nameof keeps the ParamName refactor-safe (same
            // runtime string as before).
            if (coiimg == null)
                throw new ArgumentNullException(nameof(coiimg));
            if (arr == null)
                throw new ArgumentNullException(nameof(arr));
            coiimg.ThrowIfDisposed();
            arr.ThrowIfDisposed();
            NativeMethods.core_insertImageCOI(coiimg.CvPtr, arr.CvPtr, coi);
        }
Esempio n. 12
0
        /// <summary>
        /// 選択されたチャンネルの画像を取り出す
        /// </summary>
        /// <param name="arr">入力配列. CvMat または IplImage の参照.</param>
        /// <param name="coiimg">出力行列. 1チャンネルで, 入力配列srcと同じサイズ・ビット深度を持つ.</param>
        /// <param name="coi">0以上の場合, 指定されたチャンネルについて取り出される。
        /// 0未満の場合, 入力配列srcがIplImageでCOIが指定されていれば, そのCOIについて取り出される. [既定値は-1]</param>
#else
        /// <summary>
        /// Extract the selected image channel
        /// </summary>
        /// <param name="arr">The source array. It should be a pointer to CvMat or IplImage</param>
        /// <param name="coiimg">The destination array; will have single-channel, and the same size and the same depth as src</param>
        /// <param name="coi">If the parameter is &gt;=0, it specifies the channel to extract; 
        /// If it is &lt;0, src must be a pointer to IplImage with valid COI set - then the selected COI is extracted. [By default this is -1]</param>
#endif
        public static void ExtractImageCOI(CvArr arr, OutputArray coiimg, int coi = -1)
        {
            // Validate the managed wrappers and their native state before the
            // interop call. nameof keeps the ParamName refactor-safe (same
            // runtime string as before).
            if (arr == null)
                throw new ArgumentNullException(nameof(arr));
            if (coiimg == null)
                throw new ArgumentNullException(nameof(coiimg));
            arr.ThrowIfDisposed();
            coiimg.ThrowIfNotReady();
            NativeMethods.core_extractImageCOI(arr.CvPtr, coiimg.CvPtr, coi);
            // Propagate the native result back into the managed output wrapper.
            coiimg.Fix();
        }
Esempio n. 13
0
 /// <summary>
 /// Converts a CvArr (CvMat, IplImage or CvMatND) to a Mat via the native cvarrToMat.
 /// </summary>
 /// <param name="arr">The source array; must not be null or disposed.</param>
 /// <param name="copyData">If true, the underlying data is copied; otherwise only a new header is created. [By default this is false]</param>
 /// <param name="allowND">If true, CvMatND input is converted to Mat when possible. [By default this is true]</param>
 /// <param name="coiMode">Specifies how an IplImage COI is handled by the native conversion. [By default this is 0]</param>
 /// <returns>The resulting Mat wrapping the converted data.</returns>
 public static Mat CvArrToMat(CvArr arr, bool copyData = false, bool allowND = true, int coiMode = 0)
 {
     // nameof keeps the ParamName refactor-safe (same runtime string as before).
     if (arr == null)
         throw new ArgumentNullException(nameof(arr));
     arr.ThrowIfDisposed();
     // The native conversion returns a pointer that the managed Mat takes ownership of.
     IntPtr matPtr = NativeMethods.core_cvarrToMat(arr.CvPtr, copyData ? 1 : 0, allowND ? 1 : 0, coiMode);
     return new Mat(matPtr);
 }
Esempio n. 14
0
        /// <summary>
        /// Camera capture loop. Mirrors each frame, then — depending on mode —
        /// either detects faces with a Haar cascade (mode == 1) or detects
        /// colour blobs via HSV thresholding, draws the detection onto the
        /// frame, and repositions LifeBox to track it on the form. Exceptions
        /// are logged to the console.
        /// </summary>
        private void CaptureMotion()
        {
            try
            {
                // Scale factors from capture coordinates to form coordinates.
                float wFactor = (float)this.Width / (float)CaptureBox.Width;
                float hFactor = (float)this.Height / (float)CaptureBox.Height;

                CvArr     array = null;
                CvCapture cap   = CvCapture.FromCamera(CaptureDevice.Any);

                // Reveal the mode selectors once the camera is opened (UI thread).
                this.Invoke(new Action(() =>
                {
                    lblLoading.Visible   = false;
                    radioButton1.Visible = true;
                    radioButton2.Visible = true;
                }));

                while (true)
                {
                    IplImage img = cap.QueryFrame();

                    if (img == null)
                    {
                        continue;
                    }

                    // Mirror the frame horizontally (null destination: presumably
                    // an in-place flip — confirm against the OpenCvSharp docs).
                    img.Flip(array, FlipMode.Y);

                    if (mode == 1)
                    {
                        // --- Face-detection mode ---
                        // NOTE(review): the cascade is re-read from disk on every
                        // frame; hoisting it above the while loop would avoid the
                        // repeated file I/O.
                        string filepath = "haarcascade_frontalface_alt2.xml";

                        CvHaarClassifierCascade cascade = CvHaarClassifierCascade.FromFile(filepath);
                        CvSeq <CvAvgComp>       faces   = Cv.HaarDetectObjects(img, cascade, Cv.CreateMemStorage(), 3.0, 1,
                                                                               HaarDetectionType.Zero, new CvSize(70, 70),
                                                                               new CvSize(500, 500));

                        foreach (CvAvgComp face in faces)
                        {
                            //IplImage ClonedImage = img.Clone();
                            //Cv.SetImageROI(ClonedImage, face.Rect);
                            //IplImage ThisFace = Cv.CreateImage(face.Rect.Size, ClonedImage.Depth, ClonedImage.NChannels);
                            //Cv.Copy(ClonedImage, ThisFace, null);
                            //Cv.ResetImageROI(ClonedImage);

                            //Bitmap FaceImage = BitmapConverter.ToBitmap(ThisFace);
                            //FaceImage.SetResolution(240, 180);
                            //CaptureBox.Image = FaceImage;

                            img.DrawRect(face.Rect, CvColor.Red, 3);

                            Bitmap FaceImage = BitmapConverter.ToBitmap(img);
                            FaceImage.SetResolution(240, 180);
                            // NOTE(review): assigned from the capture thread, unlike
                            // the Invoke-wrapped UI updates elsewhere in this method —
                            // confirm cross-thread control access is safe here.
                            CaptureBox.Image = FaceImage;

                            // Move LifeBox to track the detected face, clamped to the
                            // form's client area.
                            this.Invoke(new Action(() =>
                            {
                                LifeBox.Left = (int)(face.Rect.Left * wFactor - (float)(LifeBox.Width / 2.0) - (float)(this.Width / 2.0));
                                LifeBox.Top  = (int)(face.Rect.Top * hFactor - (float)(LifeBox.Height / 2.0) - (float)(this.Height / 2.0));

                                if (LifeBox.Left > (this.Width - LifeBox.Width - 12))
                                {
                                    LifeBox.Left = (this.Width - LifeBox.Width - 24);
                                }

                                if (LifeBox.Top > (this.Height - LifeBox.Height - 48))
                                {
                                    LifeBox.Top = (this.Height - LifeBox.Height - 48);
                                }

                                if (LifeBox.Left < 12)
                                {
                                    LifeBox.Left = 12;
                                }

                                if (LifeBox.Top < 12)
                                {
                                    LifeBox.Top = 12;
                                }

                                // NOTE(review): this Sleep runs on the UI thread
                                // because it is inside Invoke — confirm intended.
                                Thread.Sleep(30);
                            }));

                            // Only the first detected face per frame is processed.
                            break;
                        }
                    }
                    else
                    {
                        // --- Blob-detection mode (HSV threshold) ---
                        int AllBlobs = 0;

                        CvBlobs  blobs     = null;
                        IplImage imgHSVsrc = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 3);
                        IplImage imgHSVdst = Cv.CreateImage(Cv.GetSize(img), BitDepth.U8, 1);

                        // Threshold the HSV image to a binary mask of the target hue range.
                        Cv.CvtColor(img, imgHSVsrc, ColorConversion.BgrToHsv);
                        Cv.InRangeS(imgHSVsrc, new CvScalar(86, 80, 30), new CvScalar(115, 250, 250), imgHSVdst);
                        // The HSV source is no longer needed once the mask exists.
                        Cv.ReleaseImage(imgHSVsrc);

                        blobs = new CvBlobs(imgHSVdst);
                        blobs.FilterByArea(7000, 40000);

                        AllBlobs = blobs.Count;

                        foreach (KeyValuePair <int, CvBlob> blob in blobs)
                        {
                            CvBlob  CurrentBlob = blob.Value;
                            CvRect  BlobRect = CurrentBlob.Rect;
                            CvPoint Point1, Point2;

                            Point1.X = BlobRect.X;
                            Point1.Y = BlobRect.Y;
                            Point2.X = BlobRect.X + BlobRect.Width;
                            Point2.Y = BlobRect.Y + BlobRect.Height;

                            img.DrawRect(Point1, Point2, CvColor.LightGreen, 3, LineType.AntiAlias);

                            // Move LifeBox to track the blob, clamped to the form's
                            // client area (same logic as the face branch).
                            this.Invoke(new Action(() =>
                            {
                                LifeBox.Left = (int)(BlobRect.Left * wFactor - (float)(LifeBox.Width / 2.0) - (float)(this.Width / 2.0));
                                LifeBox.Top  = (int)(BlobRect.Top * hFactor - (float)(LifeBox.Height / 2.0) - (float)(this.Height / 2.0));

                                if (LifeBox.Left > (this.Width - LifeBox.Width - 12))
                                {
                                    LifeBox.Left = (this.Width - LifeBox.Width - 24);
                                }

                                if (LifeBox.Top > (this.Height - LifeBox.Height - 48))
                                {
                                    LifeBox.Top = (this.Height - LifeBox.Height - 48);
                                }

                                if (LifeBox.Left < 12)
                                {
                                    LifeBox.Left = 12;
                                }

                                if (LifeBox.Top < 12)
                                {
                                    LifeBox.Top = 12;
                                }

                                // NOTE(review): this Sleep runs on the UI thread
                                // because it is inside Invoke — confirm intended.
                                Thread.Sleep(30);
                            }));

                            // Only the first qualifying blob per frame is processed.
                            break;
                        }

                        // NOTE(review): these Image assignments also happen on the
                        // capture thread, not via Invoke — confirm this is safe.
                        Bitmap Item = BitmapConverter.ToBitmap(img);
                        Item.SetResolution(240, 180);
                        CaptureBox.Image = Item;

                        Bitmap HSVItem = BitmapConverter.ToBitmap(imgHSVdst);
                        HSVItem.SetResolution(240, 180);
                        HSVCaptureBox.Image = HSVItem;

                        Cv.ReleaseImage(imgHSVdst);
                    }
                }
            }
            catch (Exception e)
            {
                Console.WriteLine("ERROR: " + e.Message + "DETAILS: " + e.StackTrace);
            }
        }
Esempio n. 15
0
        /// <summary>
        /// 検出する線分の角度を指定できる確率的ハフ変換
        /// </summary>
        /// <param name="img">入力画像</param>
        /// <param name="rho">距離解像度(1ピクセル当たりの単位)</param>
        /// <param name="theta">角度解像度(ラジアン単位)</param>
        /// <param name="threshold">閾値パラメータ.対応する投票数がthresholdより大きい場合のみ,抽出された線が返される.</param>
        /// <param name="minLineLength">最小の線の長さ</param>
        /// <param name="maxLineGap">同一線上に存在する線分として扱う,二つの線分の最大の間隔.</param>
        /// <param name="thetaMin">検出する線分の角度の範囲の最小値 [0 &lt;= θ &lt;= π]</param>
        /// <param name="thetaMax">検出する線分の角度の範囲の最大値 [0 &lt;= θ &lt;= π]</param>
        /// <returns></returns>
#else
        /// <summary>
        ///
        /// </summary>
        /// <param name="img"></param>
        /// <param name="rho"></param>
        /// <param name="theta"></param>
        /// <param name="threshold"></param>
        /// <param name="minLineLength"></param>
        /// <param name="maxLineGap"></param>
        /// <param name="thetaMin"></param>
        /// <param name="thetaMax"></param>
        /// <returns></returns>
#endif
        public static CvLineSegmentPoint[] HoughLinesProbabilisticEx(this CvArr img, double rho, double theta, int threshold, double minLineLength, double maxLineGap,
                                                                     double thetaMin = 0, double thetaMax = Cv.PI)
        {
            // Probabilistic Hough transform restricted to the angle range [thetaMin, thetaMax].
            // Returns the detected line segments as endpoint pairs.
            if (img == null)
            {
                throw new ArgumentNullException(nameof(img));
            }
            if (img.ElemType != MatrixType.U8C1)
            {
                throw new ArgumentException("The source matrix must be 8-bit, single-channel image.");
            }
            if (rho <= 0)
            {
                throw new ArgumentOutOfRangeException(nameof(rho));
            }
            if (theta <= 0)
            {
                throw new ArgumentOutOfRangeException(nameof(theta));
            }
            if (threshold <= 0)
            {
                throw new ArgumentOutOfRangeException(nameof(threshold));
            }
            if (minLineLength <= 0)
            {
                throw new ArgumentOutOfRangeException(nameof(minLineLength));
            }
            if (maxLineGap < 0)
            {
                // A negative gap would make the tracking loops below break on the very
                // first miss, silently collapsing every segment to its seed pixel.
                throw new ArgumentOutOfRangeException(nameof(maxLineGap));
            }
            if (thetaMax < thetaMin)
            {
                throw new ArgumentException();
            }
            if (thetaMax > Cv.PI)
            {
                throw new ArgumentOutOfRangeException(nameof(thetaMax), "thetaMax <= pi");
            }
            if (thetaMin < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(thetaMin), "thetaMin >= 0");
            }

            unsafe
            {
                // Gather the raw image parameters (pixel pointer, size, row stride).
                byte *data;
                int   width;
                int   height;
                int   step;

                if (img is IplImage)
                {
                    IplImage obj = (IplImage)img;
                    data   = obj.ImageDataPtr;
                    width  = obj.Width;
                    height = obj.Height;
                    step   = obj.WidthStep;
                }
                else if (img is CvMat)
                {
                    CvMat obj = (CvMat)img;
                    data   = obj.DataByte;
                    width  = obj.Width;
                    height = obj.Height;
                    step   = obj.Step;
                }
                else
                {
                    throw new NotImplementedException("The source matrix of this method must be IplImage or CvMat.");
                }

                // Build the sin/cos lookup tables, pre-scaled by 1/rho so that the
                // accumulator index is r = x*cos(theta)/rho + y*sin(theta)/rho
                // (same layout as OpenCV's icvHoughLinesProbabilistic ttab).
                double   numAngleAll = Cv.PI / theta;
                int      angleMin    = (int)Math.Round(numAngleAll * (thetaMin / Cv.PI)); //(int)Math.Round(thetaMin * 180 / Cv.PI);
                int      angleMax    = (int)Math.Round(numAngleAll * (thetaMax / Cv.PI));
                int      numAngle    = angleMax - angleMin;
                int      numRho      = (int)Math.Round(((width + height) * 2 + 1) / rho);
                double[] sin         = new double[angleMax]; // oversized on purpose; entries below angleMin are never used
                double[] cos         = new double[angleMax];
                {
                    double rad  = thetaMin;
                    double irho = 1 / rho;
                    for (int t = angleMin; t < angleMax; t++, rad += theta)
                    {
                        // BUG FIX: this used to be Math.Sin(rad * irho) / Math.Cos(rad * irho),
                        // i.e. sin(theta/rho), which distorts the voting angle whenever rho != 1.
                        // The table must hold sin(theta)/rho and cos(theta)/rho.
                        sin[t] = Math.Sin(rad) * irho;
                        cos[t] = Math.Cos(rad) * irho;
                    }
                }

                // 1. Collect every non-zero pixel; mask mirrors which pixels are still unconsumed.
                CvPoint[] points = new CvPoint[Cv.CountNonZero(img)];
                bool[]    mask   = new bool[width * height];
                int       i      = 0;
                for (int y = 0; y < height; y++)
                {
                    byte *p      = data + y * step;
                    int   offset = y * width;
                    for (int x = 0; x < width; x++)
                    {
                        if (p[x] != 0)
                        {
                            mask[offset + x] = true;
                            points[i++]      = new CvPoint(x, y);
                        }
                        else
                        {
                            mask[offset + x] = false;
                        }
                    }
                }

                // Visit the edge pixels in random order.
                Shuffle(points);

                // 2. Pick pixels at random and process each one.
                int[] accum = new int[numAngle * numRho];
                List <CvLineSegmentPoint> result = new List <CvLineSegmentPoint>();
                for (int count = 0; count < points.Length; count++)
                {
                    CvPoint pt = points[count];

                    // Skip pixels already consumed by a previously extracted segment.
                    if (!mask[pt.Y * width + pt.X])
                    {
                        continue;
                    }

                    // 2.1 Vote in [theta, rho] space and find the theta with the maximum votes.
                    int maxR = threshold - 1;
                    int maxT = 0;
                    fixed(int *paccum = accum)
                    {
                        int *adata = paccum;

                        for (int t = angleMin; t < angleMax; t++, adata += numRho)
                        {
                            int r = (int)Math.Round(pt.X * cos[t] + pt.Y * sin[t]);
                            r += (numRho - 1) / 2; // shift so negative r maps into the array
                            int val = ++adata[r];
                            if (maxR < val)
                            {
                                maxR = val;
                                maxT = t;
                            }
                        }
                    }

                    if (maxR < threshold)
                    {
                        continue;
                    }

                    // 2.2 Set up the fixed-point step increments (dx0, dy0) along the detected direction.
                    // The 1/rho scale on sin/cos cancels out below, since only the ratio b/|a| (or a/|b|) matters.
                    double    a = -sin[maxT];
                    double    b = cos[maxT];
                    int       x0 = pt.X;
                    int       y0 = pt.Y;
                    int       dx0, dy0;
                    bool      xflag;
                    const int Shift = 16;
                    if (Math.Abs(a) > Math.Abs(b))
                    {
                        // Mostly-horizontal walk: step x by +/-1, y in 16.16 fixed point.
                        xflag = true;
                        dx0   = a > 0 ? 1 : -1;
                        dy0   = (int)Math.Round(b * (1 << Shift) / Math.Abs(a));
                        y0    = (y0 << Shift) + (1 << (Shift - 1));
                    }
                    else
                    {
                        // Mostly-vertical walk: step y by +/-1, x in 16.16 fixed point.
                        xflag = false;
                        dy0   = b > 0 ? 1 : -1;
                        dx0   = (int)Math.Round(a * (1 << Shift) / Math.Abs(b));
                        x0    = (x0 << Shift) + (1 << (Shift - 1));
                    }

                    // 2.3 Walk from the seed pixel in both directions to locate the segment ends.
                    CvPoint[] lineEnd = { new CvPoint(), new CvPoint() };
                    for (int k = 0; k < 2; k++)
                    {
                        int gap = 0;
                        int x = x0, y = y0, dx = dx0, dy = dy0;

                        if (k > 0)
                        {
                            dx = -dx;
                            dy = -dy;
                        }

                        // walk along the line using fixed-point arithmetics,
                        // stop at the image border or in case of too big gap
                        for (; ; x += dx, y += dy)
                        {
                            int x1, y1;

                            if (xflag)
                            {
                                x1 = x;
                                y1 = y >> Shift;
                            }
                            else
                            {
                                x1 = x >> Shift;
                                y1 = y;
                            }

                            if (x1 < 0 || x1 >= width || y1 < 0 || y1 >= height)
                            {
                                break;
                            }

                            // for each non-zero point:
                            //    update line end,
                            //    clear the mask element
                            //    reset the gap
                            if (mask[y1 * width + x1])
                            {
                                gap          = 0;
                                lineEnd[k].X = x1;
                                lineEnd[k].Y = y1;
                            }
                            else if (++gap > maxLineGap)
                            {
                                break;
                            }
                        }
                    }

                    // A candidate is accepted if it spans at least minLineLength in x or y.
                    bool goodLine = Math.Abs(lineEnd[1].X - lineEnd[0].X) >= minLineLength ||
                                    Math.Abs(lineEnd[1].Y - lineEnd[0].Y) >= minLineLength;

                    // 2.4 Re-walk the same path: un-vote the pixels of a good line from the
                    //     accumulator and clear them from the mask so they are not reused.
                    //if (processOnce)
                    {
                        for (int k = 0; k < 2; k++)
                        {
                            int x = x0, y = y0, dx = dx0, dy = dy0;

                            if (k > 0)
                            {
                                dx = -dx;
                                dy = -dy;
                            }

                            // walk along the line using fixed-point arithmetics,
                            // stop at the image border or in case of too big gap
                            for (; ; x += dx, y += dy)
                            {
                                int x1, y1;

                                if (xflag)
                                {
                                    x1 = x;
                                    y1 = y >> Shift;
                                }
                                else
                                {
                                    x1 = x >> Shift;
                                    y1 = y;
                                }

                                // for each non-zero point:
                                //    update line end,
                                //    clear the mask element
                                //    reset the gap
                                if (mask[y1 * width + x1])
                                {
                                    if (goodLine)
                                    {
                                        fixed(int *paccum = accum)
                                        {
                                            int *adata = paccum;

                                            for (int t = angleMin; t < angleMax; t++, adata += numRho)
                                            {
                                                int r = (int)Math.Round(x1 * cos[t] + y1 * sin[t]);
                                                r += (numRho - 1) / 2;
                                                adata[r]--;
                                            }
                                        }
                                    }
                                    mask[y1 * width + x1] = false;
                                }

                                // Stop once the stored endpoint for this direction is reached.
                                if (y1 == lineEnd[k].Y && x1 == lineEnd[k].X)
                                {
                                    break;
                                }
                            }
                        }
                    }

                    if (goodLine)
                    {
                        result.Add(new CvLineSegmentPoint(lineEnd[0], lineEnd[1]));
                    }
                }

                return(result.ToArray());
            }
        }