public void cariX(IplImage imgSrc, ref int min, ref int max)
        {
            // Finds the horizontal extent (first and last non-white column) of
            // the content in a binary image: a column whose pixel sum is below
            // the all-white maximum contains at least one dark pixel.
            bool minTemu = false;

            data = new CvMat();

            // A single COLUMN has imgSrc.height pixels, so its maximum possible
            // sum is height * 255. The original used width * 255, which is only
            // correct for square images.
            CvScalar maxVal = cvlib.cvRealScalar(imgSrc.height * 255);
            CvScalar val = cvlib.cvRealScalar(0);

            // For each column: if sum < height * 255 the column has content.
            // The first such column becomes min; the last one seen becomes max.
            for (int i = 0; i < imgSrc.width; i++)
            {
                cvlib.cvGetCol( imgSrc,  data, i); // view of column i
                val = cvlib.cvSum( data);
                if (val.Val < maxVal.Val)
                {
                    max = i;
                    if (!minTemu)
                    {
                        min = i;
                        minTemu = true;
                    }
                }
            }
        }
Esempio n. 2
0
        public void Absolute(IplImage imgNow)
        {
            // Computes the absolute difference between the current frame and the
            // stored reference frame, smooths it and feeds it to the white-pixel
            // motion counter. The reference frame is refreshed on alternate calls
            // (sudah_ambil toggles).
            imgDiff = cvlib.cvCreateImage(cvlib.cvGetSize( imgNow), imgNow.depth, imgNow.nChannels);

            if (!sudah_ambil)
            {
                // cvCloneImage allocates the copy itself; the original also called
                // cvCreateImage here and immediately overwrote the handle with the
                // clone, leaking that first allocation.
                imgLast = cvlib.cvCloneImage( imgNow);
                sudah_ambil = true;
            }
            else
                sudah_ambil = false;

            cvlib.cvAbsDiff( imgNow,  imgLast,  imgDiff);

            cvlib.cvSmooth( imgDiff,  imgDiff);
            cvlib.cvSmooth( imgDiff,  imgDiff);

            if (form.showAbs)
                cvlib.cvShowImage("Motion",  imgDiff);

            // NOTE(review): countWhitePix in this file also releases its argument;
            // confirm imgDiff is not released twice (once there, once below).
            countWhitePix(imgDiff);

            if (!sudah_ambil)
                cvlib.cvReleaseImage( imgLast);

            cvlib.cvReleaseImage( imgNow);
            cvlib.cvReleaseImage( imgDiff);
        }
        public void cariY(IplImage imgSrc, ref int min, ref int max)
        {
            // Vertical extent of the content: a row whose pixel sum is below
            // width * 255 (the all-white maximum for a row) contains at least
            // one dark pixel. min is the first such row, max the last.
            data = new CvMat();

            CvScalar maxVal = cvlib.cvRealScalar(imgSrc.width * 255);

            bool found = false;
            for (int row = 0; row < imgSrc.height; row++)
            {
                cvlib.cvGetRow( imgSrc,  data, row); // view of this row
                CvScalar val = cvlib.cvSum( data);
                if (val.val1 >= maxVal.val1)
                    continue; // all-white row, no content

                max = row;
                if (!found)
                {
                    min = row;
                    found = true;
                }
            }
        }
        public void Absolute(IplImage imgNow)
        {
            // Computes the absolute difference between the current frame and the
            // stored reference frame, smooths it and feeds it to the white-pixel
            // motion counter. The reference frame is refreshed on alternate calls
            // (sudah_ambil toggles).
            imgDiff = cxcore.CvCreateImage(cxcore.CvGetSize(ref imgNow), imgNow.depth, imgNow.nChannels);

            if (!sudah_ambil)
            {
                // CvCloneImage allocates the copy itself; the original also called
                // CvCreateImage here and immediately overwrote the handle with the
                // clone, leaking that first allocation.
                imgLast = cxcore.CvCloneImage(ref imgNow);
                sudah_ambil = true;
            }
            else
                sudah_ambil = false;

            cxcore.CvAbsDiff(ref imgNow, ref imgLast, ref imgDiff);

            cv.CvSmooth(ref imgDiff, ref imgDiff);
            cv.CvSmooth(ref imgDiff, ref imgDiff);

            if(form.showAbs)
                highgui.CvShowImage("Motion", ref imgDiff);

            // NOTE(review): countWhitePix in this file also releases its argument;
            // confirm imgDiff is not released twice (once there, once below).
            countWhitePix(imgDiff);

            if (!sudah_ambil)
                cxcore.CvReleaseImage(ref imgLast);

            cxcore.CvReleaseImage(ref imgNow);
            cxcore.CvReleaseImage(ref imgDiff);
        }
Esempio n. 5
0
        public bool adaBlackPix(IplImage image)
        {
            // Counts zero-valued bytes in the image buffer and reports whether
            // there are at least 1000 of them.
            // NOTE(review): iterates over widthStep, so row padding bytes (if
            // any) are included in the count — confirm rows are tightly packed.
            byte[] data = image.ImageData;

            int black = 0;
            for (int y = 0; y < image.height; y++)
            {
                int rowStart = y * image.widthStep;
                for (int x = 0; x < image.widthStep; x++)
                {
                    if (data[rowStart + x] == 0)
                        black++;
                }
            }

            return black >= 1000;
        }
        public void cariY(IplImage imgSrc, ref int min, ref int max)
        {
            // Vertical extent of the content: for each row, a sum below
            // width * 255 (the all-white maximum for a row) means the row
            // contains at least one dark pixel. min is the first such row,
            // max the last.
            data = new CvMat();

            CvScalar maxVal = cxtypes.cvRealScalar(imgSrc.width * 255);

            bool found = false;
            for (int row = 0; row < imgSrc.height; row++)
            {
                cxcore.CvGetRow(ref imgSrc, ref data, row); // view of this row
                CvScalar val = cxcore.CvSum(ref data);
                if (val.val1 >= maxVal.val1)
                    continue; // all-white row, no content

                max = row;
                if (!found)
                {
                    min = row;
                    found = true;
                }
            }
        }
Esempio n. 7
0
        public IplImage skin_hsv(IplImage image)
        {
            // Skin segmentation in HSV space: pixels outside the skin range
            // (H <= 19 && S >= 48) are blacked out; the result is converted
            // back to BGR and returned as a new image (caller must release it).
            int x, y, p;
            IplImage img_hsv;
            img_hsv = cvlib.cvCreateImage(cvlib.cvGetSize( image), 8, 3);
            cvlib.cvCvtColor( image,  img_hsv, cvlib.CV_BGR2HSV);

            num[,] bmpdata;
            bmpdata = new num[image.height, image.width];

            byte[] dataIn = img_hsv.ImageData;

            // Read pixels. The inner loop is bounded by image.width (not
            // widthStep) so that row padding, if present, can never push x past
            // the bmpdata bounds (the original's "xi < widthStep, x++" loop
            // could overflow bmpdata on padded rows). The byte offset still uses
            // widthStep as the row stride.
            for (y = 0; y < image.height; y++)
            {
                for (x = 0; x < image.width; x++)
                {
                    p = y * image.widthStep + x * 3;

                    bmpdata[y, x].H = dataIn[p];
                    bmpdata[y, x].S = dataIn[p + 1];
                    bmpdata[y, x].V = dataIn[p + 2];
                }
            }

            // Mask: keep pixels in the skin range, zero out everything else.
            for (y = 0; y < image.height; y++)
            {
                for (x = 0; x < image.width; x++)
                {
                    if (!(bmpdata[y, x].H <= 19 && bmpdata[y, x].S >= 48))
                        bmpdata[y, x].H = bmpdata[y, x].S = bmpdata[y, x].V = 0;
                }
            }

            // Write the masked pixels back to the HSV buffer.
            for (y = 0; y < image.height; y++)
            {
                for (x = 0; x < image.width; x++)
                {
                    p = y * image.widthStep + x * 3;

                    dataIn[p] = bmpdata[y, x].H;
                    dataIn[p + 1] = bmpdata[y, x].S;
                    dataIn[p + 2] = bmpdata[y, x].V;
                }
            }

            img_hsv.ImageData = dataIn;

            IplImage res = cvlib.cvCreateImage(cvlib.cvGetSize( image), 8, 3);
            cvlib.cvCvtColor( img_hsv,  res, cvlib.CV_HSV2BGR);

            cvlib.cvReleaseImage( img_hsv);
            return res;
        }
Esempio n. 8
0
 public void NewEqualsOld3()
 {
     // Regression check: the new implementation must agree with the old one
     // on the shapes3.png sample for blobs, rendering and label images.
     using (IplImage img = new IplImage(@"Image\Blob\shapes3.png", LoadMode.GrayScale))
     {
         CompareBlob(img);
         CompareRendering(img);
         CompareLabelImage(img);
     }
 }
Esempio n. 9
0
        /// <summary>
        /// Calculates the central moments for a blob and stores them in the
        /// blob structure. (cvCentralMoments)
        /// </summary>
        /// <param name="blob">Blob whose central moments are computed.</param>
        /// <param name="img">Label image (depth=IPL_DEPTH_LABEL and num. channels=1).</param>
        public static void CentralMoments(CvBlob blob, IplImage img)
        {
            if (blob == null)
            {
                throw new ArgumentNullException("blob");
            }
            if (img == null)
            {
                throw new ArgumentNullException("img");
            }

            CvBlobInvoke.cvb_cvCentralMoments(blob.CvPtr, img.CvPtr);
        }
Esempio n. 10
0
 /// <summary>
 /// Runs the native CvAdaptiveSkinDetector on a BGR frame and writes the
 /// result into the supplied hue mask image.
 /// </summary>
 /// <param name="inputBgrImage">Input frame (BGR).</param>
 /// <param name="outputHueMask">Destination image receiving the hue mask.</param>
 public virtual void Process(IplImage inputBgrImage, IplImage outputHueMask)
 {
     if (disposed)
     {
         throw new ObjectDisposedException("CvAdaptiveSkinDetector");
     }
     if (inputBgrImage == null)
     {
         throw new ArgumentNullException("inputBgrImage");
     }
     if (outputHueMask == null)
     {
         throw new ArgumentNullException("outputHueMask");
     }

     inputBgrImage.ThrowIfDisposed();
     outputHueMask.ThrowIfDisposed();

     NativeMethods.contrib_CvAdaptiveSkinDetector_process(ptr, inputBgrImage.CvPtr, outputHueMask.CvPtr);
 }
Esempio n. 11
0
        // Demo: segments the flesh-colored region of a hand image, then finds
        // and visualizes its contour, convex hull and convexity defects.
        public ConvexityDefect()
        {
            using (IplImage imgSrc = new IplImage(@"img\hand_p.jpg", LoadMode.Color))
            using (IplImage imgHSV = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            using (IplImage imgH = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            using (IplImage imgS = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            using (IplImage imgV = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            using (IplImage imgBackProjection = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            using (IplImage imgFlesh = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            using (IplImage imgHull = new IplImage(imgSrc.Size, BitDepth.U8, 1))
            using (IplImage imgDefect = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            using (IplImage imgContour = new IplImage(imgSrc.Size, BitDepth.U8, 3))
            using (CvMemStorage storage = new CvMemStorage())
            {
                // RGB -> HSV, then split into separate H/S/V planes
                Cv.CvtColor(imgSrc, imgHSV, ColorConversion.BgrToHsv);
                Cv.CvtPixToPlane(imgHSV, imgH, imgS, imgV, null);
                IplImage[] hsvPlanes = {imgH, imgS, imgV};

                // Find the flesh-colored region by back projection
                RetrieveFleshRegion(imgSrc, hsvPlanes, imgBackProjection);
                // Keep only the blob with the largest area
                FilterByMaximalBlob(imgBackProjection, imgFlesh);
                Interpolate(imgFlesh);

                // Find the contour of the flesh region
                CvSeq<CvPoint> contours = FindContours(imgFlesh, storage);
                if (contours != null)
                {
                    Cv.DrawContours(imgContour, contours, CvColor.Red, CvColor.Green, 0, 3, LineType.AntiAlias);

                    // Compute the convex hull of the contour
                    int[] hull;
                    Cv.ConvexHull2(contours, out hull, ConvexHullOrientation.Clockwise);
                    Cv.Copy(imgFlesh, imgHull);
                    DrawConvexHull(contours, hull, imgHull);

                    // Compute the convexity defects (gaps between hull and contour)
                    Cv.Copy(imgContour, imgDefect);
                    CvSeq<CvConvexityDefect> defect = Cv.ConvexityDefects(contours, hull);
                    DrawDefects(imgDefect, defect);
                }

                // Show all intermediate results until a key is pressed
                using (new CvWindow("src", imgSrc))
                using (new CvWindow("back projection", imgBackProjection))
                using (new CvWindow("hull", imgHull))
                using (new CvWindow("defect", imgDefect))
                {
                    Cv.WaitKey();
                }
            }
        }
        public void countWhitePix(IplImage image)
        {
            // Counts white (255) bytes in the frame-difference image and updates
            // the motion-state counters: diam (stillness), gerak (motion) and
            // wave (large motion). Long stillness signals a match; sustained
            // waving signals a reset.
            // NOTE(review): the loop runs over widthStep, so any row padding
            // bytes are included in the count — confirm frames are tightly packed.
            int p, white = 0 ;

            byte pix;

            byte[] data = image.ImageDataUChar;

            for (int x = 0; x < image.widthStep; x++)
            {
                for (int y = 0; y < image.height; y++)
                {
                    p = y * image.widthStep + x;

                    pix = data[p];

                    if (pix == 255)
                        white++;
                }
            }

            // The original condition was "white < 50 && white < 5", which reduces
            // to "white < 5"; a range check may have been intended — TODO confirm.
            if (white < 5)
                diam++;
            else
                diam = 0;

            if (white > 100)
            {
                gerak++;
                if (white > 500)
                    wave++;
            }

            // Long stillness: confirmed match; reset all counters.
            if (diam > 10)
            {
                gerak = 0;
                wave = 0;
                diam = 0;
                form.match = true;
            }

            // Sustained large motion (waving): request a reset.
            if (wave > 10)
            {
                form.reset = true;
                wave = 0;
                gerak = 0;
                diam = 0;
            }

            // Do NOT release the image here. Both call sites (the Absolute
            // methods) release imgDiff themselves right after this call, so the
            // original CvReleaseImage here caused a double release.
        }
Esempio n. 13
0
        /// <summary>
        /// Niblackの手法による二値化処理を行う。
        /// </summary>
        /// <param name="imgSrc">入力画像</param>
        /// <param name="imgDst">出力画像</param>
        /// <param name="kernelSize">局所領域のサイズ</param>
        /// <param name="k">係数</param>
#else
        /// <summary>
        /// Binarizes by Niblack's method
        /// </summary>
        /// <param name="src">Input image</param>
        /// <param name="dst">Output image</param>
        /// <param name="kernelSize">Window size</param>
        /// <param name="k">Adequate coefficient</param>
#endif
        public static void Niblack(IplImage src, IplImage dst, int kernelSize, double k)
        {
            // Binarizes src into dst with Niblack's local threshold:
            // threshold(x, y) = mean + k * stddev over a kernelSize window.
            if (src == null)
                throw new ArgumentNullException("src");
            if (dst == null)
                throw new ArgumentNullException("dst");

            // Grayscale images only
            if (src.NChannels != 1)
                throw new ArgumentException("src must be gray scale image");
            if (dst.NChannels != 1)
                throw new ArgumentException("dst must be gray scale image");

            // Window size must be at least 3 and odd (centered window)
            if (kernelSize < 3)
                throw new ArgumentOutOfRangeException("kernelSize", "size must be 3 and above");
            if (kernelSize % 2 == 0)
                throw new ArgumentOutOfRangeException("kernelSize", "size must be odd number");

            // Only the ROI of src is processed; dst must match the ROI size
            CvRect roi = src.ROI;
            int width = roi.Width;
            int height = roi.Height;
            if (width != dst.Width || height != dst.Height)
                throw new ArgumentException("src.Size == dst.Size");

            unsafe
            {
                byte* pSrc = src.ImageDataPtr;
                byte* pDst = dst.ImageDataPtr;
                int stepSrc = src.WidthStep;
                int stepDst = dst.WidthStep;
                // Rows are independent, so they are processed in parallel.
                //for (int y = 0; y < gray.Height; y++)
                MyParallel.For(0, height, delegate(int y)
                {
                    for (int x = 0; x < width; x++)
                    {
                        double m, s;
                        // Local mean and standard deviation around (x, y)
                        MeanStddev(src, x + roi.X, y + roi.Y, kernelSize, out m, out s);
                        double threshold = m + k * s;
                        int offsetSrc = stepSrc * (y + roi.Y) + (x + roi.X);
                        int offsetDst = stepDst * y + x;
                        if (pSrc[offsetSrc] < threshold)
                            pDst[offsetDst] = 0;
                        else
                            pDst[offsetDst] = 255;
                    }
                }
                );
            }
        }
Esempio n. 14
0
        public void SimpleTest()
        {
            // Binarize the sample with Otsu's threshold, label the blobs and
            // show the rendered result until a key is pressed.
            using (IplImage src = new IplImage(@"Image\Blob\shapes2.png", LoadMode.GrayScale))
            using (IplImage binary = new IplImage(src.Size, BitDepth.U8, 1))
            using (IplImage render = new IplImage(src.Size, BitDepth.U8, 3))
            {
                Cv.Threshold(src, binary, 0, 255, ThresholdType.Otsu);

                CvBlobs blobs = new CvBlobs(binary);
                blobs.RenderBlobs(src, render);
                using (CvWindow window = new CvWindow(render))
                {
                    Cv.WaitKey();
                }
            }
        }
Esempio n. 15
0
        /// <summary>
        /// Computes the flesh-colored region by histogram back projection.
        /// </summary>
        /// <param name="imgSrc">Source image (used for the normalization scale).</param>
        /// <param name="hsvPlanes">H and S planes of the source image.</param>
        /// <param name="imgDst">Destination mask receiving the back projection.</param>
        private void RetrieveFleshRegion(IplImage imgSrc, IplImage[] hsvPlanes, IplImage imgDst)
        {
            // 2-D hue/saturation histogram: hue limited to [0, 20),
            // saturation to [50, 255).
            int[] histSize = {30, 32};
            float[][] ranges =
            {
                new float[] {0.0f, 20f},  // hue range
                new float[] {50f, 255f},  // saturation range
            };

            imgDst.Zero();
            using (CvHistogram hist = new CvHistogram(histSize, HistogramFormat.Array, ranges, true))
            {
                hist.Calc(hsvPlanes, false, null);
                float minValue, maxValue;
                hist.GetMinMaxValue(out minValue, out maxValue);
                // Scale so the strongest bin maps to full intensity
                hist.Normalize(imgSrc.Width * imgSrc.Height * 255 / maxValue);
                hist.CalcBackProject(hsvPlanes, imgDst);
            }
        }
Esempio n. 16
0
        private static void Surf(IplImage img1, IplImage img2)
        {
            // Detect SURF keypoints and descriptors in both images, match them
            // with a brute-force matcher and display the matched pairs.
            Mat src1 = new Mat(img1, true);
            Mat src2 = new Mat(img2, true);

            SURF surf = new SURF(500, 4, 2, true);
            MatOfFloat descriptors1 = new MatOfFloat();
            MatOfFloat descriptors2 = new MatOfFloat();
            KeyPoint[] keypoints1, keypoints2;
            surf.Run(src1, null, out keypoints1, descriptors1);
            surf.Run(src2, null, out keypoints2, descriptors2);

            // Match descriptor vectors with a brute-force matcher (L2 norm)
            BFMatcher matcher = new BFMatcher(NormType.L2, false);
            DMatch[] matches = matcher.Match(descriptors1, descriptors2); // NOTE(review): original author reported an exception here
            Mat view = new Mat();
            Cv2.DrawMatches(src1, keypoints1, src2, keypoints2, matches, view);

            Window.ShowImages(view);
        }
Esempio n. 17
0
        // Classifies img with k-nearest-neighbour search (knn.find_nearest) and
        // returns the label of the nearest class.
        // NOTE(review): img is released before returning — callers must not use
        // it afterwards; confirm no caller releases it again.
        public float classify(ref IplImage img, bool showResult)
        {
            CvMat data = new CvMat();
            CvMat results = new CvMat();                                                                    //<<<< check: never explicitly released
            CvMat dist = new CvMat();                                                                       //<<<< check: never explicitly released
            CvMat nearest = cvlib.cvCreateMat(1, K, cvlib.CV_32FC1);                                     //<<<< check: never explicitly released

            float result;
            // Normalize the input to the fixed size x size sample layout
            prs_image = p.preprocess(img, size, size);

            // Convert to 32-bit float in [0, 1] (0.0039215 ~= 1/255)
            img32 = cvlib.cvCreateImage(cvlib.cvSize(size, size), (int)cvlib.IPL_DEPTH_32F, 1);
            cvlib.cvConvertScale( prs_image, img32, 0.0039215, 0);
            cvlib.cvGetSubRect( img32,  data, cvlib.cvRect(0, 0, size, size));             //possible memory leak??

            CvMat row_header = new CvMat();
            CvMat row1 = new CvMat();                                                                       //<<< check

            // Flatten the size x size matrix into a single row vector for k-NN
            row1 = cvlib.cvReshape( data,  row_header, 0, 1);                                        //<<< check

            result = knn.find_nearest(row1, K, results, IntPtr.Zero, nearest, dist);

            // Accuracy: how many of the K nearest neighbours agree with result
            int accuracy = 0;
            for (int i = 0; i < K; i++)
            {
                if (nearest.fl[i] == result)
                    accuracy++;
            }
            float pre = 100 * ((float)accuracy / (float)K);
            if (showResult == true)
            {
                form.WriteLine("|\tClass\t\t|\tPrecision\t\t|\tAccuracy/K\t|\n", false, false);
                form.WriteLine("|\t" + result.ToString() + "\t\t|\t" + pre.ToString("N2") + "% \t\t|\t" + accuracy.ToString() + "/" + K.ToString() + "\t\t|" + "\n", false, false);
                form.WriteLine(" -------------------------------------------------------------------------------------------------------------------------------------------------\n", false, false);
            }

            cvlib.cvReleaseImage( img);

            return result;
        }
Esempio n. 18
0
 private static void GrayCapture(Capture capture)
 {
     // Grab frames until a key is pressed; run Canny edge detection on a
     // grayscale copy of each frame and display the result.
     using (var window = new NamedWindow("test"))
     {
         while (CV.WaitKey(10) < 0) // yield to the UI between frames
         {
             using (var src = capture.QueryFrame()) // grab a frame
             {
                 if (src == null)
                     break; // stream exhausted

                 using (var gray = new IplImage(src.Size, IplDepth.U8, 1))
                 using (var dstCanny = new IplImage(src.Size, IplDepth.U8, 1))
                 {
                     CV.CvtColor(src, gray, ColorConversion.Bgr2Gray);
                     CV.Canny(gray, dstCanny, 50, 50);
                     window.ShowImage(dstCanny);
                 }
             }
         }
     }
 }
Esempio n. 19
0
 public void DebugShowOldLabel(IplImage oldLabels)
 {
     // Renders a binary visualization of a label image (any non-zero label
     // becomes white) and shows it in a window titled "old".
     // The original wrapped the pixel copy in try { ... } catch { throw; },
     // which is a no-op (bare rethrow adds nothing); removed.
     using (IplImage img = new IplImage(oldLabels.Size, BitDepth.U8, 1))
     {
         img.Zero();
         for (int r = 0; r < img.Height; r++)
         {
             for (int c = 0; c < img.Width; c++)
             {
                 if (oldLabels[r, c] != 0)
                     img[r, c] = 255;
             }
         }
         CvWindow.ShowImages("old", img);
     }
 }
        public CvRect cariBB(IplImage imgSrc)
        {
            // Computes the bounding box of the image content from the column
            // (cariX) and row (cariY) scans, capping the height at 1.5x the
            // width.
            int xmin = 0, xmax = 0, ymin = 0, ymax = 0;

            cariX(imgSrc, ref xmin, ref xmax);
            cariY(imgSrc, ref ymin, ref ymax);

            int width = xmax - xmin;
            int height = ymax - ymin;

            // Cap the height so the box never grows taller than 1.5x its width
            double lebar = width * 1.5;
            if (height >= lebar)
                height = (int)lebar;

            return new CvRect(xmin, ymin, width, height);
        }
Esempio n. 21
0
        static void Main(string[] args)
        {
            // Edge-detection demo: capture from camera 0, convert each frame to
            // grayscale, run Canny and display the edges until a key is pressed.
            using (var capture = Capture.CreateCameraCapture(0))
            using (var window = new NamedWindow("test"))
            {
                while (CV.WaitKey(10) < 0) // yield to the UI between frames
                {
                    using (var src = capture.QueryFrame()) // grab a frame
                    using (var gray = new IplImage(src.Size, IplDepth.U8, 1))
                    using (var dstCanny = new IplImage(src.Size, IplDepth.U8, 1))
                    {
                        CV.CvtColor(src, gray, ColorConversion.Bgr2Gray);
                        CV.Canny(gray, dstCanny, 50, 50);
                        window.ShowImage(dstCanny);
                    }
                }
            }

            //var fileCapture = Capture.CreateFileCapture("/path/to/your/video/test.avi");
        }
Esempio n. 22
0
        /// <summary>
        /// 検出する線分の角度を指定できる確率的ハフ変換
        /// </summary>
        /// <param name="img">入力画像</param>
        /// <param name="rho">距離解像度(1ピクセル当たりの単位)</param>
        /// <param name="theta">角度解像度(ラジアン単位)</param>
        /// <param name="threshold">閾値パラメータ.対応する投票数がthresholdより大きい場合のみ,抽出された線が返される.</param>
        /// <param name="minLineLength">最小の線の長さ</param>
        /// <param name="maxLineGap">同一線上に存在する線分として扱う,二つの線分の最大の間隔.</param>
        /// <param name="thetaMin">検出する線分の角度の範囲の最小値 [0 &lt;= θ &lt;= π]</param>
        /// <param name="thetaMax">検出する線分の角度の範囲の最大値 [0 &lt;= θ &lt;= π]</param>
        /// <returns></returns>
#else
        /// <summary>
        ///
        /// </summary>
        /// <param name="img"></param>
        /// <param name="rho"></param>
        /// <param name="theta"></param>
        /// <param name="threshold"></param>
        /// <param name="minLineLength"></param>
        /// <param name="maxLineGap"></param>
        /// <param name="thetaMin"></param>
        /// <param name="thetaMax"></param>
        /// <returns></returns>
#endif
        public static CvLineSegmentPoint[] HoughLinesProbabilisticEx(this CvArr img, double rho, double theta, int threshold, double minLineLength, double maxLineGap,
                                                                     double thetaMin = 0, double thetaMax = Cv.PI)
        {
            if (img == null)
            {
                throw new ArgumentNullException("img");
            }
            if (img.ElemType != MatrixType.U8C1)
            {
                throw new ArgumentException("The source matrix must be 8-bit, single-channel image.");
            }
            if (rho <= 0)
            {
                throw new ArgumentOutOfRangeException("rho");
            }
            if (theta <= 0)
            {
                throw new ArgumentOutOfRangeException("theta");
            }
            if (threshold <= 0)
            {
                throw new ArgumentOutOfRangeException("threshold");
            }
            if (minLineLength <= 0)
            {
                throw new ArgumentOutOfRangeException("minLineLength");
            }
            if (thetaMax < thetaMin)
            {
                throw new ArgumentException();
            }
            if (thetaMax > Cv.PI)
            {
                throw new ArgumentOutOfRangeException("thetaMax", "thetaMax <= pi");
            }
            if (thetaMin < 0)
            {
                throw new ArgumentOutOfRangeException("thetaMin", "thetaMin >= 0");
            }

            unsafe
            {
                // 画像パラメータの収集
                byte *data;
                int   width;
                int   height;
                int   step;

                if (img is IplImage)
                {
                    IplImage obj = (IplImage)img;
                    data   = obj.ImageDataPtr;
                    width  = obj.Width;
                    height = obj.Height;
                    step   = obj.WidthStep;
                }
                else if (img is CvMat)
                {
                    CvMat obj = (CvMat)img;
                    data   = obj.DataByte;
                    width  = obj.Width;
                    height = obj.Height;
                    step   = obj.Step;
                }
                else
                {
                    throw new NotImplementedException("The source matrix of this method must be IplImage or CvMat.");
                }

                // sin, cosのLUTを作っておく
                double   numAngleAll = Cv.PI / theta;
                int      angleMin    = (int)Math.Round(numAngleAll * (thetaMin / Cv.PI)); //(int)Math.Round(thetaMin * 180 / Cv.PI);
                int      angleMax    = (int)Math.Round(numAngleAll * (thetaMax / Cv.PI));
                int      numAngle    = angleMax - angleMin;
                int      numRho      = (int)Math.Round(((width + height) * 2 + 1) / rho);
                double[] sin         = new double[angleMax]; // 大きめに作成。angleMinより手前の要素は使わない
                double[] cos         = new double[angleMax];
                {
                    double rad  = thetaMin;
                    double irho = 1 / rho;
                    for (int t = angleMin; t < angleMax; t++, rad += theta)
                    {
                        sin[t] = Math.Sin(rad * irho);
                        cos[t] = Math.Cos(rad * irho);
                    }
                }

                // 1. 非0の点を収集
                CvPoint[] points = new CvPoint[Cv.CountNonZero(img)];
                bool[]    mask   = new bool[width * height];
                int       i      = 0;
                for (int y = 0; y < height; y++)
                {
                    byte *p      = data + y * step;
                    int   offset = y * width;
                    for (int x = 0; x < width; x++)
                    {
                        if (p[x] != 0)
                        {
                            mask[offset + x] = true;
                            points[i++]      = new CvPoint(x, y);
                        }
                        else
                        {
                            mask[offset + x] = false;
                        }
                    }
                }

                // ランダムな順に並び変え
                Shuffle(points);

                // 2. 画素をランダムに選択し処理
                int[] accum = new int[numAngle * numRho];
                List <CvLineSegmentPoint> result = new List <CvLineSegmentPoint>();
                for (int count = 0; count < points.Length; count++)
                {
                    CvPoint pt = points[count];

                    // 画素データが更新されているのは除外
                    if (!mask[pt.Y * width + pt.X])
                    {
                        continue;
                    }

                    // 2.1 [θ,ρ]空間で投票し、投票値が最大値となるθを求める
                    int maxR = threshold - 1;
                    int maxT = 0;
                    fixed(int *paccum = accum)
                    {
                        int *adata = paccum;

                        for (int t = angleMin; t < angleMax; t++, adata += numRho)
                        {
                            int r = (int)Math.Round(pt.X * cos[t] + pt.Y * sin[t]);
                            r += (numRho - 1) / 2;
                            int val = ++adata[r];
                            if (maxR < val)
                            {
                                maxR = val;
                                maxT = t;
                            }
                        }
                    }

                    if (maxR < threshold)
                    {
                        continue;
                    }

                    // 2.2 追尾用の増分値 (dx0,dy0) の設定
                    double    a = -sin[maxT];
                    double    b = cos[maxT];
                    int       x0 = pt.X;
                    int       y0 = pt.Y;
                    int       dx0, dy0;
                    bool      xflag;
                    const int Shift = 16;
                    if (Math.Abs(a) > Math.Abs(b))
                    {
                        xflag = true;
                        dx0   = a > 0 ? 1 : -1;
                        dy0   = (int)Math.Round(b * (1 << Shift) / Math.Abs(a));
                        y0    = (y0 << Shift) + (1 << (Shift - 1));
                    }
                    else
                    {
                        xflag = false;
                        dy0   = b > 0 ? 1 : -1;
                        dx0   = (int)Math.Round(a * (1 << Shift) / Math.Abs(b));
                        x0    = (x0 << Shift) + (1 << (Shift - 1));
                    }

                    // 2.3 線分画素を両端方向に追尾し、線分を抽出
                    CvPoint[] lineEnd = { new CvPoint(), new CvPoint() };
                    for (int k = 0; k < 2; k++)
                    {
                        int gap = 0;
                        int x = x0, y = y0, dx = dx0, dy = dy0;

                        if (k > 0)
                        {
                            dx = -dx;
                            dy = -dy;
                        }

                        // walk along the line using fixed-point arithmetics,
                        // stop at the image border or in case of too big gap
                        for (; ; x += dx, y += dy)
                        {
                            int x1, y1;

                            if (xflag)
                            {
                                x1 = x;
                                y1 = y >> Shift;
                            }
                            else
                            {
                                x1 = x >> Shift;
                                y1 = y;
                            }

                            if (x1 < 0 || x1 >= width || y1 < 0 || y1 >= height)
                            {
                                break;
                            }

                            // for each non-zero point:
                            //    update line end,
                            //    clear the mask element
                            //    reset the gap
                            if (mask[y1 * width + x1])
                            {
                                gap          = 0;
                                lineEnd[k].X = x1;
                                lineEnd[k].Y = y1;
                            }
                            else if (++gap > maxLineGap)
                            {
                                break;
                            }
                        }
                    }

                    // lineLengthより長いものを線分候補とする
                    bool goodLine = Math.Abs(lineEnd[1].X - lineEnd[0].X) >= minLineLength ||
                                    Math.Abs(lineEnd[1].Y - lineEnd[0].Y) >= minLineLength;

                    // 2.4 追尾した画素を削除し、次回以降は処理されないようにする
                    //if (processOnce)
                    {
                        for (int k = 0; k < 2; k++)
                        {
                            int x = x0, y = y0, dx = dx0, dy = dy0;

                            if (k > 0)
                            {
                                dx = -dx;
                                dy = -dy;
                            }

                            // walk along the line using fixed-point arithmetics,
                            // stop at the image border or in case of too big gap
                            for (; ; x += dx, y += dy)
                            {
                                int x1, y1;

                                if (xflag)
                                {
                                    x1 = x;
                                    y1 = y >> Shift;
                                }
                                else
                                {
                                    x1 = x >> Shift;
                                    y1 = y;
                                }

                                // for each non-zero point:
                                //    update line end,
                                //    clear the mask element
                                //    reset the gap
                                if (mask[y1 * width + x1])
                                {
                                    if (goodLine)
                                    {
                                        fixed(int *paccum = accum)
                                        {
                                            int *adata = paccum;

                                            for (int t = angleMin; t < angleMax; t++, adata += numRho)
                                            {
                                                int r = (int)Math.Round(x1 * cos[t] + y1 * sin[t]);
                                                r += (numRho - 1) / 2;
                                                adata[r]--;
                                            }
                                        }
                                    }
                                    mask[y1 * width + x1] = false;
                                }

                                if (y1 == lineEnd[k].Y && x1 == lineEnd[k].X)
                                {
                                    break;
                                }
                            }
                        }
                    }

                    if (goodLine)
                    {
                        result.Add(new CvLineSegmentPoint(lineEnd[0], lineEnd[1]));
                    }
                }

                return(result.ToArray());
            }
        }
Esempio n. 23
0
 //---------------------------------------------------------
 // Name    : convertMatToIplImage
 // Purpose : Converts an OpenCV Mat into an IplImage.
 // Params  : m_img — the Mat to convert
 // Returns : the converted image (also cached in the i_img field)
 //---------------------------------------------------------
 private IplImage convertMatToIplImage(Mat m_img)
 {
     // Cache the conversion result in the i_img field, then hand it back.
     i_img = m_img.ToIplImage();
     return i_img;
 }
Esempio n. 24
0
        /// <summary>
        /// Plays the front and rear dash-cam video files frame by frame.
        /// The FrontRearFlag field selects which capture feeds the picture box;
        /// brightnessFlag/contrastFlag enable per-frame adjustment.
        /// </summary>
        /// <param name="fname_front">Path of the front-camera video file.</param>
        /// <param name="fname_rear">Path of the rear-camera video file.</param>
        private void VideoPlay(string fname_front, string fname_rear)
        {
            vc_front = new VideoCapture(fname_front);
            vc_rear  = new VideoCapture(fname_rear);

            // Initialize the playback-position track bar.
            metroTrackBar2.Minimum = 0;
            metroTrackBar2.Maximum = vc_front.FrameCount;

            // NOTE(review): the play button should switch to a pause button here.

            // Per-frame delay. Guard against FrameCount == 0 so we never divide
            // by zero (original: 1000 * 19 / FrameCount; an Fps-based formula
            // was left commented out in the original).
            int sleepTime = 1000 * 19 / Math.Max(1, vc_front.FrameCount);
            Mat img       = new Mat();

            timer1.Enabled = true;
            timer1.Start();

            while (true)
            {
                // Pick the capture matching the current view so the shared
                // decode/display logic below is written once (the original
                // duplicated the whole branch for front and rear).
                bool isFront = FrontRearFlag;
                var  vc      = isFront ? vc_front : vc_rear;

                // Re-sync the position only right after a view switch skipped frames.
                if (posFrame - vc.PosFrames > 1)
                {
                    vc.PosFrames = posFrame;
                }

                vc.Read(img); // read the next frame

                if (img.Empty())
                {
                    // End of stream: release and stop playback.
                    vc.Release();
                    timer1.Stop();
                    break;
                }

                // Convert only after the Empty() check succeeded; the original
                // allocated a throw-away IplImage and converted the frame before
                // knowing whether it was empty.
                IplImage tmp = img.ToIplImage();

                if (brightnessFlag == true || contrastFlag == true) // brightness / contrast adjustment
                {
                    tmp = brightnessChanged(tmp);
                    tmp = contrastChanged(tmp);
                }

                // The rear view additionally goes through screenContrast, as in
                // the original rear branch.
                pictureBoxIpl1.ImageIpl = isFront ? tmp : screenContrast(tmp);
                posFrame = vc.PosFrames;
                Delay(sleepTime);
            }

            // NOTE(review): captures should also be released on form close /
            // user-initiated stop, not only at end-of-stream.
        }
Esempio n. 25
0
        // TCP待ち受け
        public async void ListenData()
        {
            // 待ち受けアドレス、ポートの設定
            string localhost = Dns.GetHostName();
            string str_ipad  = null;

            IPAddress[] adrList = Dns.GetHostAddresses(localhost);
            foreach (IPAddress address in adrList)
            {
                if (address.AddressFamily == AddressFamily.InterNetwork)
                {
                    str_ipad = address.ToString();
                    break;
                }
            }
            IPAddress   ipad     = IPAddress.Parse(str_ipad);
            string      str_port = (string)textBox2.Dispatcher.Invoke(new readTextDelegate(readPortNum));
            Int32       port     = Int32.Parse(str_port);
            TcpListener tl       = new TcpListener(ipad, port);

            tl.Start();

            // メッセージの処理
            while (true)
            {
                TcpClient     tc = tl.AcceptTcpClient();
                NetworkStream ns = tc.GetStream();

                // 受信したデータがなくなるまで繰り返す
                var typ = new byte[1];
                var len = new byte[4];
                while (ns.Read(typ, 0, typ.Length) != 0)
                {
                    ns.Read(len, 0, len.Length);
                    int    num = BitConverter.ToInt32(len, 0);
                    byte[] data;

                    switch (typ[0])
                    {
                    case 0:     // テキストデータの処理
                        data = new byte[num];
                        ns.Read(data, 0, data.Length);
                        var str = Encoding.GetEncoding(encode).GetString(data);
                        Dispatcher.Invoke(new writeTextDelegate(writeTextData), new object[] { str });
                        break;

                    case 1:     // 画像データの処理
                        int readsize = 0;
                        data = new byte[num];
                        while (readsize < num)
                        {
                            readsize += ns.Read(data, readsize, num - readsize);
                        }
                        BitmapImage bitmapImage = LoadImage(data);
                        Bitmap      bitmap      = BitmapImage2Bitmap(bitmapImage);
                        //Dispatcher.Invoke(new writeImageDelegate(writeImageData), new object[] { bitmap });
                        bitmap.Save(faceR, ImageFormat.Jpeg);
                        break;

                    default:
                        break;
                    }
                }

                if (File.Exists(faceR))
                {
                    // 感情情報を取得
                    Emotion[] response = await UploadAndDetectEmotions(faceR);

                    File.Delete(faceR);
                    if (response == null)
                    {
                        //MessageBox.Show("Error occured!");
                    }
                    else
                    {
                        Dictionary <string, float> scores = new Dictionary <string, float>();
                        foreach (Emotion emo in response)
                        {
                            scores.Add("Anger", emo.Scores.Anger);
                            scores.Add("Contempt", emo.Scores.Contempt);
                            scores.Add("Disgust", emo.Scores.Disgust);
                            scores.Add("Fear", emo.Scores.Fear);
                            scores.Add("Happiness", emo.Scores.Happiness);
                            scores.Add("Neutral", emo.Scores.Neutral);
                            scores.Add("Sadness", emo.Scores.Sadness);
                            scores.Add("Surprise", emo.Scores.Surprise);
                        }
                        emotionR = scores.OrderByDescending((x) => x.Value).First().Key;
                    }

                    // 行末にスタンプを挿入
                    IplImage stamp   = stamps[emotionR];
                    IplImage resized = new IplImage(15, 15, stamp.Depth, stamp.NChannels);
                    Cv.Resize(stamp, resized);
                    Bitmap bmp = BitmapConverter.ToBitmap(resized);
                    Dispatcher.Invoke(new writeImageDelegate(writeImageData), new object[] { bmp });

                    // 改行
                    Dispatcher.Invoke(new writeTextDelegate(writeTextData), new object[] { "\n" });
                }

                tc.Close();
            }

            tl.Stop();
        }
Esempio n. 26
0
        /// <summary>
        /// Initializes the chat window: opens the camera, loads the JSON
        /// configuration, cuts the emotion stamps out of the sprite sheet,
        /// starts the camera-preview timer, and launches the TCP listener
        /// on a background thread.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            // Set up the camera (capture device 0).
            camera = Cv.CreateCameraCapture(0);

            // Load settings. Dispose the reader so the file handle is released
            // promptly (the original leaked the StreamReader).
            Config config;
            using (StreamReader sr = new StreamReader(json, Encoding.GetEncoding(encode)))
            {
                config = JsonConvert.DeserializeObject <Config>(sr.ReadToEnd());
            }

            textBox1.Text = config.RemoteAddress;
            textBox2.Text = config.PortNumber;
            textBox3.Text = config.UserName;

            // Build the stamp images by cutting regions out of the sprite sheet.
            // Each entry is { x, y, width, height } in sheet coordinates.
            // NOTE(review): "Contempt" and "Disgust" share the same region —
            // verify against the actual sprite sheet layout.
            stamps = new Dictionary <string, IplImage>();
            IplImage sheet = Cv.LoadImage(srcimg);
            var emotions = new Dictionary <string, List <int> >
            {
                { "Anger",     new List <int> { 0, 0, 120, 120 } },
                { "Contempt",  new List <int> { 140, 140, 120, 120 } },
                { "Disgust",   new List <int> { 140, 140, 120, 120 } },
                { "Fear",      new List <int> { 140, 0, 120, 120 } },
                { "Happiness", new List <int> { 280, 140, 120, 120 } },
                { "Neutral",   new List <int> { 140, 280, 120, 120 } },
                { "Sadness",   new List <int> { 280, 0, 120, 120 } },
                { "Surprise",  new List <int> { 0, 280, 120, 120 } },
            };
            foreach (var key in emotions.Keys)
            {
                // Trim the region for this emotion and scale it to the preview
                // size. (The original also allocated a throw-away IplImage for
                // 'trimmed' that was immediately overwritten.)
                IplImage stamp   = new IplImage((int)image1.Width, (int)image1.Height, sheet.Depth, sheet.NChannels);
                IplImage trimmed = trimming(sheet, emotions[key][0], emotions[key][1], emotions[key][2], emotions[key][3]);
                Cv.Resize(trimmed, stamp, Interpolation.NearestNeighbor);
                stamps.Add(key, stamp);
            }

            // Camera-preview timer: ticks every 30 ms (~33 fps).
            dispatcherTimer          = new DispatcherTimer(DispatcherPriority.Normal);
            dispatcherTimer.Interval = new TimeSpan(0, 0, 0, 0, 30);
            dispatcherTimer.Tick    += new EventHandler(dispatcherTimer_Tick);
            dispatcherTimer.Start();

            // Focus the message input box.
            FocusManager.SetFocusedElement(FocusManager.GetFocusScope(textBox), textBox);

            // Start the TCP listener on a background thread so it cannot keep
            // the process alive after the window closes.
            Thread t = new Thread(new ThreadStart(ListenData));

            t.IsBackground = true;
            t.Start();
        }
        /// <summary>
        /// IplImageをWriteableBitmapに変換する (dpi=96, BitmapPalette=null)
        /// </summary>
        /// <param name="src">変換するIplImage</param>
        /// <returns>WPFのWriteableBitmap</returns>
#else
        /// <summary>
        /// Converts IplImage to WriteableBitmap (dpi=96, BitmapPalette=null)
        /// </summary>
        /// <param name="src">Input IplImage</param>
        /// <returns>WriteableBitmap</returns>
#endif
        public static WriteableBitmap ToWriteableBitmap(this IplImage src)
        {
            // Delegate to the full overload with 96 dpi and no palette,
            // choosing the pixel format that best matches the image's
            // depth and channel count.
            return ToWriteableBitmap(src, 96, 96, GetOptimumPixelFormats(src.Depth, src.NChannels), null);
        }
Esempio n. 28
0
        /// <summary>
        /// Set the ROI of an image to the bounding box of a blob.
        /// </summary>
        /// <param name="img">Image.</param>
        /// <exception cref="ObjectDisposedException">Thrown when this blob has already been disposed.</exception>
        public void SetImageROIToBlob(IplImage img)
        {
            // Guard: a disposed blob has no valid bounding box to apply.
            if (IsDisposed)
            {
                throw new ObjectDisposedException("CvBlob");
            }

            CvBlobLib.SetImageROItoBlob(img, this);
        }
Esempio n. 29
0
 /// <summary>
 /// Converts the 24-bit full-color image (mImageFull) into an 8-bit
 /// grayscale image and stores the result in mImageGray.
 /// </summary>
 private void convertFullToGray()
 {
     // Allocate a single-channel 8-bit destination, run the BGR -> gray
     // conversion into it, then publish it through the mImageGray field.
     var gray = new IplImage(mImageFull.GetSize(), BitDepth.U8, 1);
     mImageFull.CvtColor(gray, ColorConversion.BgrToGray);
     mImageGray = gray;
 }
Esempio n. 30
0
 /// <summary>
 /// Returns true when the clickable reference image is found in the
 /// given screen capture.
 /// </summary>
 public override bool Check(IplImage screenImage)
 {
     return Match(screenImage, IplImages.clickableImage);
 }
        /// <summary>
        /// System.Drawing.BitmapからOpenCVのIplImageへ変換して返す.
        /// </summary>
        /// <param name="src">変換するSystem.Drawing.Bitmap</param>
        /// <param name="dst">変換結果を格納するIplImage</param>
#else
        /// <summary>
        /// Converts System.Drawing.Bitmap to IplImage
        /// </summary>
        /// <param name="src">System.Drawing.Bitmap object to be converted</param>
        /// <param name="dst">An IplImage object which is converted from System.Drawing.Bitmap</param>
#endif
        /// <summary>
        /// Copies the pixel data of a System.Drawing.Bitmap into an existing IplImage.
        /// </summary>
        /// <param name="src">Bitmap to convert (1bpp indexed, 8bpp indexed, 24bpp RGB or 32bpp variants).</param>
        /// <param name="dst">Destination IplImage; must be BitDepth.U8 and the same size as src.</param>
        /// <exception cref="ArgumentNullException">src or dst is null.</exception>
        /// <exception cref="ArgumentException">dst is disposed, sizes differ, or dst has the wrong channel count.</exception>
        /// <exception cref="NotSupportedException">dst is not 8 bits per channel.</exception>
        /// <exception cref="NotImplementedException">src has an unsupported pixel format.</exception>
        public static unsafe void ToIplImage(this Bitmap src, IplImage dst)
        {
            if (src == null)
            {
                throw new ArgumentNullException("src");
            }
            if (dst == null)
            {
                throw new ArgumentNullException("dst");
            }
            if (dst.IsDisposed)
            {
                throw new ArgumentException("The specified dst is disposed.", "dst");
            }
            if (dst.Depth != BitDepth.U8)
            {
                throw new NotSupportedException();
            }
            if (src.Width != dst.Width || src.Height != dst.Height)
            {
                throw new ArgumentException("Size of src must be equal to size of dst.");
            }

            int        w    = src.Width;
            int        h    = src.Height;
            Rectangle  rect = new Rectangle(0, 0, w, h);
            BitmapData bd   = null;

            try
            {
                bd = src.LockBits(rect, ImageLockMode.ReadOnly, src.PixelFormat);

                byte *p         = (byte *)bd.Scan0.ToPointer();
                int   stride    = bd.Stride;
                int   widthStep = dst.WidthStep;
                byte *imageData = (byte *)dst.ImageData.ToPointer();

                switch (src.PixelFormat)
                {
                case PixelFormat.Format1bppIndexed:
                {
                    if (dst.NChannels != 1)
                    {
                        throw new ArgumentException("Invalid nChannels");
                    }
                    // Each bitmap row is padded to a 4-byte boundary; unpack
                    // every bit of every byte into one 8-bit pixel.
                    int  x = 0;
                    int  y;
                    int  bytePos;
                    byte b;
                    int  i;
                    for (y = 0; y < h; y++)
                    {
                        // Walk the bytes of this row.
                        for (bytePos = 0; bytePos < stride; bytePos++)
                        {
                            if (x < w)
                            {
                                // Extract the 8 bits of the current byte,
                                // most significant bit first.
                                b = p[bytePos];
                                for (i = 0; i < 8; i++)
                                {
                                    if (x >= w)
                                    {
                                        break;
                                    }
                                    // IplImage stores 8 bits per pixel.
                                    imageData[widthStep * y + x] = ((b & 0x80) == 0x80) ? (byte)255 : (byte)0;
                                    b <<= 1;
                                    x++;
                                }
                            }
                        }
                        // Advance to the next row.
                        x  = 0;
                        p += stride;
                    }
                }
                break;

                case PixelFormat.Format8bppIndexed:
                {
                    if (dst.NChannels != 1)
                    {
                        throw new ArgumentException("Invalid nChannels");
                    }
                    // Row layouts match byte-for-byte, so copy in one shot.
                    Util.CopyMemory(dst.ImageData, bd.Scan0, dst.ImageSize);
                }
                break;

                case PixelFormat.Format24bppRgb:
                {
                    if (dst.NChannels != 3)
                    {
                        throw new ArgumentException("Invalid nChannels");
                    }
                    // Identical BGR layout on both sides; bulk copy.
                    Util.CopyMemory(dst.ImageData, bd.Scan0, dst.ImageSize);
                }
                break;

                case PixelFormat.Format32bppRgb:
                case PixelFormat.Format32bppArgb:
                case PixelFormat.Format32bppPArgb:
                {
                    switch (dst.NChannels)
                    {
                    case 4:
                        // Same 4-byte-per-pixel layout; bulk copy.
                        Util.CopyMemory(dst.ImageData, bd.Scan0, dst.ImageSize);
                        break;

                    case 3:
                        // Drop the alpha byte while copying B, G, R.
                        for (int y = 0; y < h; y++)
                        {
                            for (int x = 0; x < w; x++)
                            {
                                imageData[y * widthStep + x * 3]     = p[y * stride + x * 4 + 0];
                                imageData[y * widthStep + x * 3 + 1] = p[y * stride + x * 4 + 1];
                                imageData[y * widthStep + x * 3 + 2] = p[y * stride + x * 4 + 2];
                            }
                        }
                        break;

                    default:
                        throw new ArgumentException("Invalid nChannels");
                    }
                }
                break;

                default:
                    // The original silently did nothing here, leaving dst
                    // untouched; fail loudly instead, matching the sibling
                    // ToBitmap converter.
                    throw new NotImplementedException("Unsupported PixelFormat: " + src.PixelFormat);
                }
            }
            finally
            {
                if (bd != null)
                {
                    src.UnlockBits(bd);
                }
            }
        }
        /// <summary>
        /// OpenCVのIplImageを指定した出力先にSystem.Drawing.Bitmapとして変換する
        /// </summary>
        /// <param name="src">変換するIplImage</param>
        /// <param name="dst">出力先のSystem.Drawing.Bitmap</param>
        /// <remarks>Author: shimat, Gummo (ROI support)</remarks>
#else
        /// <summary>
        /// Converts IplImage to System.Drawing.Bitmap
        /// </summary>
        /// <param name="src">Mat</param>
        /// <param name="dst">IplImage</param>
        /// <remarks>Author: shimat, Gummo (ROI support)</remarks>
#endif
        public static unsafe void ToBitmap(this IplImage src, Bitmap dst)
        {
            if (src == null)
            {
                throw new ArgumentNullException("src");
            }
            if (dst == null)
            {
                throw new ArgumentNullException("dst");
            }
            if (src.IsDisposed)
            {
                throw new ArgumentException("The image is disposed.", "src");
            }
            //if (src.Depth != BitDepth.U8)
            //    throw new ArgumentOutOfRangeException("src");
            if (src.ROI.Width != dst.Width || src.ROI.Height != dst.Height)
            {
                throw new ArgumentException("");
            }

            PixelFormat pf = dst.PixelFormat;

            // For single-plane output, build a grayscale palette
            // (palette index i maps to gray level i).
            if (pf == PixelFormat.Format8bppIndexed)
            {
                ColorPalette plt = dst.Palette;
                for (int x = 0; x < 256; x++)
                {
                    plt.Entries[x] = Color.FromArgb(x, x, x);
                }
                dst.Palette = plt;
            }

            // If the depth is not BitDepth.U8, scale the pixels into 8 bits first.
            IplImage _src;

            if (src.Depth != BitDepth.U8)
            {
                _src = new IplImage(src.Size, BitDepth.U8, src.NChannels);
                using (IplImage f = src.Clone())
                {
                    // Floating-point images are normalized to [0, 255] before the
                    // absolute-scale conversion.
                    if (src.Depth == BitDepth.F32 || src.Depth == BitDepth.F64)
                    {
                        Cv.Normalize(src, f, 255, 0, NormType.MinMax);
                    }
                    Cv.ConvertScaleAbs(f, _src);
                }
            }
            else
            {
                _src = src;
            }
            Bitmap _dst = dst;

            int        w    = _src.ROI.Width;
            int        h    = _src.ROI.Height;
            Rectangle  rect = new Rectangle(0, 0, w, h);
            BitmapData bd   = null;

            try
            {
                bd = _dst.LockBits(rect, ImageLockMode.WriteOnly, pf);

                byte *psrc         = (byte *)(_src.ImageData.ToPointer());
                byte *pdst         = (byte *)(bd.Scan0.ToPointer());
                int   xo           = _src.ROI.X;
                int   yo           = _src.ROI.Y;
                int   widthStepSrc = _src.WidthStep;
                int   widthStepDst = ((_src.ROI.Width * _src.NChannels) + 3) / 4 * 4; // round up to a multiple of 4
                int   stride       = bd.Stride;
                int   ch           = _src.NChannels;

                switch (pf)
                {
                case PixelFormat.Format1bppIndexed:
                {
                    // BitmapData rows are 4-byte aligned while the IplImage uses
                    // one byte per pixel, so pack the bits by hand:
                    // a source pixel of 0 clears the bit, anything else sets it.
                    //int offset = stride - (w / 8);
                    int  x = xo;
                    int  y;
                    int  bytePos;
                    byte mask;
                    byte b = 0;
                    int  i;
                    for (y = yo; y < h; y++)
                    {
                        for (bytePos = 0; bytePos < stride; bytePos++)
                        {
                            if (x < w)
                            {
                                for (i = 0; i < 8; i++)
                                {
                                    mask = (byte)(0x80 >> i);
                                    if (x < w && psrc[widthStepSrc * y + x] == 0)
                                    {
                                        b &= (byte)(mask ^ 0xff);
                                    }
                                    else
                                    {
                                        b |= mask;
                                    }

                                    x++;
                                }
                                pdst[bytePos] = b;
                            }
                        }
                        x     = xo;
                        pdst += stride;
                    }
                    break;
                }

                case PixelFormat.Format8bppIndexed:
                case PixelFormat.Format24bppRgb:
                case PixelFormat.Format32bppArgb:
                    // Fast path: if the strides match and there is no ROI offset,
                    // the whole image can be copied in one call.
                    if (widthStepSrc == widthStepDst && _src.ROI.Size == _src.Size)
                    {
                        Util.CopyMemory(pdst, psrc, _src.ImageSize);
                    }
                    else
                    {
                        for (int y = 0; y < h; y++)
                        {
                            int offsetSrc = ((y + yo) * widthStepSrc) + (xo * ch);
                            int offsetDst = (y * widthStepDst);

                            /*
                             * for (int x = 0; x < _src.ROI.Width; x++)
                             * {
                             *  pdst[x + offset_dst] = psrc[x + offset_src];
                             * }
                             * //*/
                            // Copy one row at a time, honoring the ROI offset.
                            Util.CopyMemory(pdst + offsetDst, psrc + offsetSrc, w * ch);
                        }
                    }
                    break;

                default:
                    throw new NotImplementedException();
                }
            }
            finally
            {
                _dst.UnlockBits(bd);
            }

            // Compensate for bottom-up image origin by flipping vertically.
            if (src.Origin == ImageOrigin.BottomLeft)
            {
                _dst.RotateFlip(RotateFlipType.RotateNoneFlipY);
            }

            // Dispose the temporary instance created for depth scaling, if any.
            if (_src != src)
            {
                _src.Dispose();
            }
        }
Esempio n. 33
0
    // Update is called once per frame.
    // Grabs both camera images, converts them to HSV, samples them at fixed
    // points, merges the samples into a 3D grid, and runs the inside/outside
    // test against any polygon found on the same Y layer.
    void Update()
    {
        /*     debug (FPS counter)     */
        frameCount++;
        float time = Time.realtimeSinceStartup - prevTime;

        /* ------------------------- */

        // Grab the current frame from each camera.
        i_img1 = cam1.getCameraImage();
        i_img2 = cam2.getCameraImage();

        // Convert the camera images from BGR to HSV.
        g.convertBgrToHsv(i_img1, h_img1);
        g.convertBgrToHsv(i_img2, h_img2);

        // Smooth both HSV images.
        g.convertSmooothing(h_img1);
        g.convertSmooothing(h_img2);

        // Sample data at the predefined points of each camera image
        // (horizontal and vertical point sets).
        g.getPointData(h_img1, hps_arr);
        g.getPointData(h_img2, vps_arr);

        // Combine the vertical and horizontal point data into a 3D array.
        bondPosStaArr(vps_arr, hps_arr, ps_arr3D);

        // For every Y layer that contains data, run the inside/outside test.
        for (int y = 0; y < GlobalVar.CAMERA_HEIGHT / GlobalVar.POINT_INTERVAL; y++)
        {
            // Skip layers with no sampled data.
            if (isExsistInfo3DArrY(ps_arr3D, y))
            {
                // Only test layers where a polygon actually exists.
                if (polygon.isExsistPolygon(y * GlobalVar.POINT_INTERVAL))
                {
                    Debug.Log("同じ階層にポリゴンあり");
                    if (isInsideOrOutside(io_flag, ps_arr3D, y))
                    {
                        Debug.Log("内部に手あり");
                        polygon.overrideXYData(io_flag, y);

                        // Reset the observation-point data for the next frame.
                        init3DArr(ps_arr3D);
                        initMFlag(io_flag);

                        // Re-run the inside/outside test between the shape and
                        // the observation points.
                        polygon.getIODMonitoringPoint(io_flag);

                        // One hit is enough; stop scanning further layers.
                        break;
                    }
                }
            }
        }

        displayIDot(io_flag);

        // Render the detected object as 3D dots.
        display3Ddot(ps_arr3D);

        /*     debug (FPS counter)     */
        if (time >= 0.5f)
        {
            //Debug.LogFormat("{0}fps", frameCount/time);
            frameCount = 0;
            prevTime   = Time.realtimeSinceStartup;
        }
        /* ------------------------- */
    }
        /// <summary>
        /// WriteableBitmapをIplImageに変換する.
        /// </summary>
        /// <param name="src">変換するWriteableBitmap</param>
        /// <param name="dst">出力先のIplImage</param>
#else
        /// <summary>
        /// Converts WriteableBitmap to IplImage
        /// </summary>
        /// <param name="src">Input WriteableBitmap</param>
        /// <param name="dst">Output IplImage</param>
#endif
        public static void ToIplImage(this WriteableBitmap src, IplImage dst)
        {
            // Copies the pixel data of a WPF WriteableBitmap into an existing IplImage.
            // Both images must already have the same size and a compatible channel count.
            if (src == null)
            {
                throw new ArgumentNullException(nameof(src));
            }
            if (dst == null)
            {
                throw new ArgumentNullException(nameof(dst));
            }
            if (src.PixelWidth != dst.Width || src.PixelHeight != dst.Height)
            {
                throw new ArgumentException("size of src must be equal to size of dst");
            }
            //if (dst.Depth != BitDepth.U8)
            //    throw new ArgumentException("bit depth of dst must be BitDepth.U8", "dst");

            int w        = src.PixelWidth;
            int h        = src.PixelHeight;
            int bpp      = src.Format.BitsPerPixel;
            int channels = GetOptimumChannels(src.Format);

            if (dst.NChannels != channels)
            {
                throw new ArgumentException("nChannels of dst is invalid", nameof(dst));
            }

            unsafe
            {
                byte *p         = (byte *)(dst.ImageData.ToPointer());
                int   widthStep = dst.WidthStep;

                // 1 bpp must be unpacked by hand, bit by bit
                if (bpp == 1)
                {
                    // Copy the WriteableBitmap data into an array.
                    // Each byte holds 8 horizontal pixels.
                    // NOTE(review): (w / 8) + 1 over-allocates one byte when w is a
                    // multiple of 8; harmless for CopyPixels, but (w + 7) / 8 is exact.
                    int    stride = (w / 8) + 1;
                    byte[] pixels = new byte[h * stride];
                    src.CopyPixels(pixels, stride, 0);

                    int x = 0;
                    for (int y = 0; y < h; y++)
                    {
                        int offset = y * stride;
                        // Walk every byte of this row
                        for (int bytePos = 0; bytePos < stride; bytePos++)
                        {
                            if (x < w)
                            {
                                // Extract the 8 bits of the current byte (MSB first)
                                byte b = pixels[offset + bytePos];
                                for (int i = 0; i < 8; i++)
                                {
                                    if (x >= w)
                                    {
                                        break;
                                    }
                                    // The destination IplImage stores 8 bits per pixel (0 or 255)
                                    p[widthStep * y + x] = ((b & 0x80) == 0x80) ? (byte)255 : (byte)0;
                                    b <<= 1;
                                    x++;
                                }
                            }
                        }
                        // Move to the next row
                        x = 0;
                    }
                }
                // 8bpp
                else if (bpp == 8)
                {
                    int    stride = w;
                    byte[] pixels = new byte[h * stride];
                    src.CopyPixels(pixels, stride, 0);
                    for (int y = 0; y < h; y++)
                    {
                        for (int x = 0; x < w; x++)
                        {
                            p[widthStep * y + x] = pixels[y * stride + x];
                        }
                    }
                }
                // 24bpp, 32bpp, ...: copy straight into the IplImage buffer
                else
                {
                    int stride = w * ((bpp + 7) / 8);
                    src.CopyPixels(Int32Rect.Empty, dst.ImageData, dst.ImageSize, stride);
                }
            }
        }
        /// <summary>
        /// IplImageをWriteableBitmapに変換する (dpi=96, BitmapPalette=null)
        /// </summary>
        /// <param name="src">変換するIplImage</param>
        /// <param name="pf">ビットマップの PixelFormat</param>
        /// <returns>WPFのWriteableBitmap</returns>
#else
        /// <summary>
        /// Converts IplImage to WriteableBitmap (dpi=96, BitmapPalette=null)
        /// </summary>
        /// <param name="src">Input IplImage</param>
        /// <param name="pf">Pixel format of output WriteableBitmap</param>
        /// <returns>WriteableBitmap</returns>
#endif
        // Convenience overload: forwards to the full conversion with a 96x96 dpi
        // and no bitmap palette.
        public static WriteableBitmap ToWriteableBitmap(this IplImage src, PixelFormat pf)
            => ToWriteableBitmap(src, 96, 96, pf, null);
Esempio n. 36
0
        /// <summary>
        /// 初期化
        /// </summary>
        /// <param name="image"></param>
#else
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="image"></param>
#endif
        public CvWindowEx(IplImage image) : this()
        {
            // Run the default constructor first, then assign the given image
            // to the Image property.
            Image = image;
        }
        /// <summary>
        /// IplImageをWriteableBitmapに変換する.
        /// 返却値を新たに生成せず引数で指定したWriteableBitmapに格納するので、メモリ効率が良い。
        /// </summary>
        /// <param name="src">変換するIplImage</param>
        /// <param name="dst">変換結果を設定するWriteableBitmap</param>
#else
        /// <summary>
        /// Converts IplImage to WriteableBitmap.
        /// This method is more efficient because new instance of WriteableBitmap is not allocated.
        /// </summary>
        /// <param name="src">Input IplImage</param>
        /// <param name="dst">Output WriteableBitmap</param>
#endif
        public static void ToWriteableBitmap(IplImage src, WriteableBitmap dst)
        {
            // Copies the pixel data of an IplImage into an existing WriteableBitmap.
            // Both images must already have the same size and a compatible channel count.
            if (src == null)
            {
                // FIX: use nameof for refactor-safe parameter names (consistent with ToIplImage)
                throw new ArgumentNullException(nameof(src));
            }
            if (dst == null)
            {
                throw new ArgumentNullException(nameof(dst));
            }
            if (src.Width != dst.PixelWidth || src.Height != dst.PixelHeight)
            {
                throw new ArgumentException("size of src must be equal to size of dst");
            }
            //if (src.Depth != BitDepth.U8)
            //throw new ArgumentException("bit depth of src must be BitDepth.U8", "src");

            int w   = src.Width;
            int h   = src.Height;
            int bpp = dst.Format.BitsPerPixel;

            int channels = GetOptimumChannels(dst.Format);

            if (src.NChannels != channels)
            {
                throw new ArgumentException("PixelFormat of dst is invalid", nameof(dst));
            }

            // If the image origin is bottom-left, flip a temporary copy vertically first
            IplImage ipl;

            if (src.Origin == ImageOrigin.TopLeft)
            {
                ipl = src;
            }
            else
            {
                ipl = src.Clone();
                Cv.Flip(src, ipl, FlipMode.X);
            }

            if (bpp == 1)
            {
                unsafe
                {
                    // Pack pixels by hand: 8 horizontal pixels per output byte
                    int stride = w / 8 + 1;
                    if (stride < 2)
                    {
                        stride = 2;
                    }
                    byte[] pixels    = new byte[h * stride];
                    byte * p         = (byte *)(ipl.ImageData.ToPointer());
                    // FIX: read the stride from ipl (whose buffer p points at), not src;
                    // the values are equal for a clone, but this keeps the pairing correct.
                    int    widthStep = ipl.WidthStep;
                    int    x         = 0;
                    for (int y = 0; y < h; y++)
                    {
                        int offset = y * stride;
                        for (int bytePos = 0; bytePos < stride; bytePos++)
                        {
                            if (x < w)
                            {
                                byte b = 0;
                                // Check the next 8 horizontal pixels and fold them into one byte (MSB first)
                                for (int i = 0; i < 8; i++)
                                {
                                    b <<= 1;
                                    if (x < w && p[widthStep * y + x] != 0)
                                    {
                                        b |= 1;
                                    }
                                    x++;
                                }
                                pixels[offset + bytePos] = b;
                            }
                        }
                        x = 0;
                    }
                    dst.WritePixels(new Int32Rect(0, 0, w, h), pixels, stride, 0);
                }
            }
            else
            {
                dst.WritePixels(new Int32Rect(0, 0, w, h), ipl.ImageData, ipl.ImageSize, ipl.WidthStep);
            }

            // Release the temporary flipped clone
            if (src.Origin == ImageOrigin.BottomLeft)
            {
                ipl.Dispose();
            }
        }
Esempio n. 38
0
 /************************ The processing methods *********************************/
 /// <summary>
 /// Updates the tracked object position from the current frame.
 /// </summary>
 /// <param name="curFrame">Current video frame to track against.</param>
 /// <returns>The boolean result of the native tracking call.</returns>
 /// <exception cref="ObjectDisposedException">This tracker has already been disposed.</exception>
 /// <exception cref="ArgumentNullException"><paramref name="curFrame"/> is null.</exception>
 public virtual bool TrackObject(IplImage curFrame)
 {
     if (disposed)
         // FIX: GetType().Name reports the actual (possibly derived) type;
         // the hard-coded "CvCamShiftTracker" string would be wrong for subclasses
         throw new ObjectDisposedException(GetType().Name);
     if (curFrame == null)
         // FIX: nameof is refactor-safe, unlike the "curFrame" string literal
         throw new ArgumentNullException(nameof(curFrame));
     return CppInvoke.CvCamShiftTracker_track_object(ptr, curFrame.CvPtr);
 }
Esempio n. 39
0
        // Fired when the Send button is clicked: sends the chat text (and the current
        // face image, if any) over TCP, then appends an emotion stamp to the message.
        private async void button_Click(object sender, RoutedEventArgs e)
        {
            // Configure the TCP client from the UI fields
            string hostName = textBox1.Text;
            int    port     = Int32.Parse(textBox2.Text);

            // FIX: TcpClient/NetworkStream were closed manually and leaked if an
            // exception was thrown mid-way; using-statements guarantee disposal.
            using (TcpClient tc = new TcpClient(hostName, port))
            using (NetworkStream ns = tc.GetStream())
            {
                // Send the text message
                if (ns.CanWrite)
                {
                    // Payload type marker (0x00 = text)
                    var typ = new byte[1];
                    typ[0] = 0x0000;

                    // Data to send
                    string str = textBox3.Text + " > " + textBox.Text + " ";
                    textBox.Clear();
                    var mesg = Encoding.GetEncoding(encode).GetBytes(str);

                    // Payload length
                    var len = BitConverter.GetBytes(mesg.Length);

                    // Concatenate type + length + body and send
                    var bary = typ.Concat(len).Concat(mesg).ToArray();
                    ns.Write(bary, 0, bary.Length);

                    // Echo to the local text box
                    writeTextData(str);
                }

                // Send the face image, if one was captured
                if (ns.CanWrite && face != null)
                {
                    // Payload type marker (0x01 = image)
                    var typ = new byte[1];
                    typ[0] = 0x0001;

                    // Copy out the image contents
                    // FIX: Bitmap and MemoryStream are IDisposable and were never disposed
                    using (Bitmap bitmap = BitmapConverter.ToBitmap(face))
                    using (MemoryStream ms = new MemoryStream())
                    {
                        bitmap.Save(faceL, ImageFormat.Jpeg);
                        bitmap.Save(ms, ImageFormat.Bmp);
                        var img = ms.GetBuffer();

                        // Payload length
                        var len = BitConverter.GetBytes(img.Length);

                        // Concatenate type + length + body and send
                        var bary = typ.Concat(len).Concat(img).ToArray();
                        ns.Write(bary, 0, bary.Length);
                    }
                }
            } // stream and client are disposed here, before the emotion lookup (as before)

            if (File.Exists(faceL))
            {
                // Fetch emotion scores for the saved face image
                Emotion[] response = await UploadAndDetectEmotions(faceL);

                File.Delete(faceL);
                if (response == null)
                {
                    //MessageBox.Show("Error occured!");
                }
                else
                {
                    Dictionary<string, float> scores = new Dictionary<string, float>();
                    foreach (Emotion emo in response)
                    {
                        // FIX: Dictionary.Add threw ArgumentException (duplicate key) when
                        // more than one Emotion was returned; the indexer keeps the last one.
                        scores["Anger"]     = emo.Scores.Anger;
                        scores["Contempt"]  = emo.Scores.Contempt;
                        scores["Disgust"]   = emo.Scores.Disgust;
                        scores["Fear"]      = emo.Scores.Fear;
                        scores["Happiness"] = emo.Scores.Happiness;
                        scores["Neutral"]   = emo.Scores.Neutral;
                        scores["Sadness"]   = emo.Scores.Sadness;
                        scores["Surprise"]  = emo.Scores.Surprise;
                    }
                    emotionL = scores.OrderByDescending((x) => x.Value).First().Key;
                }

                // Append the emotion stamp at the end of the line
                IplImage stamp   = stamps[emotionL];
                IplImage resized = new IplImage(15, 15, stamp.Depth, stamp.NChannels);
                Cv.Resize(stamp, resized);
                Bitmap bmp = BitmapConverter.ToBitmap(resized);
                writeImageData(bmp);

                // New line
                richTextBox.AppendText("\n");
                richTextBox.ScrollToEnd();
            }
        }
        /// <summary>
        /// Updates the motion-history image (MHI) from a new frame and draws the
        /// detected motion components, each with a direction indicator, into imgDst.
        /// </summary>
        /// <param name="imgMain">Current BGR camera frame.</param>
        /// <param name="imgDst">Destination image the motion visualization is drawn into.</param>
        /// <param name="diff_threshold">Threshold applied to the inter-frame difference.</param>
        public void update_mhi(IplImage imgMain, ref IplImage imgDst, int diff_threshold)
        {
            // NOTE(review): seconds-of-the-minute wraps every 60 s, so the MHI timestamp
            // is not monotonic across minute boundaries — confirm this is intended.
            double timestamp = (double)DateTime.Now.Second;
            CvSize size = new CxCore.CvSize(imgMain.width, imgMain.height);
            int i, idx1 = last, idx2;
            IplImage silh;
            CvSeq seq;
            CvRect comp_rect;
            double count;
            double angle;
            CvPoint center;
            double magnitude;
            CvScalar color;

            //allocate images at the beginning or reallocate them if the frame size is changed
            if (mhi.ptr == null || mhi.width != size.width || mhi.height != size.height)
            {
                for (i = 0; i < N; i++)
                {
                    buf[i] = cxcore.CvCreateImage(size, (int)cxtypes.IPL_DEPTH_8U, 1);
                    cxcore.CvZero(ref buf[i]);
                }
                cxcore.CvReleaseImage(ref mhi);
                cxcore.CvReleaseImage(ref orient);
                cxcore.CvReleaseImage(ref segmask);
                cxcore.CvReleaseImage(ref mask);

                mhi = cxcore.CvCreateImage(size, (int)cxtypes.IPL_DEPTH_32F, 1);
                cxcore.CvZero(ref mhi);
                orient = cxcore.CvCreateImage(size, (int)cxtypes.IPL_DEPTH_32F, 1);
                segmask = cxcore.CvCreateImage(size, (int)cxtypes.IPL_DEPTH_32F, 1);
                mask = cxcore.CvCreateImage(size, (int)cxtypes.IPL_DEPTH_32F, 1);
            }

            // convert the new frame to grayscale into the ring buffer
            cv.CvCvtColor(ref imgMain, ref buf[last], cvtypes.CV_BGR2GRAY);

            // advance the ring-buffer index: idx1 = previous frame, idx2 = current frame
            idx2 = (last + 1) % N;
            last = idx2;

            // silhouette = |frame(t) - frame(t-1)|, thresholded to a binary mask
            silh = buf[idx2];
            cxcore.CvAbsDiff(ref buf[idx1], ref buf[idx2], ref silh);

            cv.CvThreshold(ref silh, ref silh, diff_threshold, 1, cv.CV_THRESH_BINARY);
            cv.CvUpdateMotionHistory(ref silh, ref mhi, timestamp, MHI_DURATION);

            // scale the MHI to 0..255 so the most recent motion is brightest
            cxcore.CvConvertScale(ref mhi, ref mask, 255 / MHI_DURATION, (MHI_DURATION - timestamp) * 255 / MHI_DURATION);
            cxcore.CvZero(ref imgDst);
            cxcore.CvMerge(ref mask, ref imgDst);
            cv.CvCalcMotionGradient(ref mhi, ref mask, ref orient, MAX_TIME_DELTA, MIN_TIME_DELTA, 3);
            if (storage.ptr == null)
                storage = cxcore.CvCreateMemStorage();
            else
                cxcore.CvClearMemStorage(ref storage);
            // segment the MHI into independently moving components
            seq = cv.CvSegmentMotion(ref mhi, ref segmask, ref storage, timestamp, MAX_TIME_DELTA);
            // i == -1 processes the whole image; i >= 0 processes each motion component
            for (i = -1; i < seq.total; i++)
            {
                if (i < 0)
                {
                    comp_rect = new CvRect(0, 0, size.width, size.height);
                    color = cxcore.CV_RGB(255, 255, 255);
                    magnitude = 100;
                }
                else
                {
                    IntPtr ptr = cxcore.CvGetSeqElem(ref seq, i);
                    CvConnectedComp c = (CvConnectedComp)cvconvert.PtrToType(ptr, typeof(CvConnectedComp));
                    comp_rect = c.rect;
                    // skip components that are too small to be interesting
                    if (comp_rect.width + comp_rect.height < 100)
                        continue;
                    color = cxcore.CV_RGB(255, 0, 0);
                    magnitude = 30;
                }

                //select component ROI
                cxcore.CvSetImageROI(ref silh, comp_rect);
                cxcore.CvSetImageROI(ref mhi, comp_rect);
                cxcore.CvSetImageROI(ref orient, comp_rect);
                cxcore.CvSetImageROI(ref mask, comp_rect);

                //calculate orientation
                angle = cv.CvCalcGlobalOrientation(ref orient, ref mask, ref mhi, timestamp, MHI_DURATION);
                // convert to a screen-oriented angle (y axis points down)
                angle = 360 - angle;

                count = cxcore.CvNorm(ref silh); //<<<<<<<<<<<<<<< recheck

                cxcore.CvResetImageROI(ref mhi);
                cxcore.CvResetImageROI(ref orient);
                cxcore.CvResetImageROI(ref mask);
                cxcore.CvResetImageROI(ref silh);

                //check for the case of little motion
                if (count < comp_rect.width * comp_rect.height * 0.05)
                    continue;

                //draw a clock with arrow indicating the direction
                center = new CvPoint((comp_rect.x + comp_rect.width / 2), (comp_rect.y + comp_rect.height / 2));

                cxcore.CvCircle(ref imgDst, center, cxcore.CvRound(magnitude * 1.2), color, 3, cxcore.CV_AA, 0);
                cxcore.CvLine(ref imgDst, center,
                    new CvPoint(cxcore.CvRound(center.x + magnitude * Math.Cos(angle * Math.PI / 180)),
                    cxcore.CvRound(center.y - magnitude * Math.Sin(angle * Math.PI / 180))),
                    color, 3, cxcore.CV_AA, 0);
            }
        }
Esempio n. 41
0
        /// <summary>
        /// Classical Multidimensional Scaling: embeds the city distance matrix in 2-D
        /// and renders the estimated city locations in an OpenCV window.
        /// </summary>
        public MDS()
        {
            // creates distance matrix
            int   size = CityDistance.GetLength(0);
            CvMat t    = new CvMat(size, size, MatrixType.F64C1, CityDistance);

            // adds Torgerson's additive constant to t
            t += Torgerson(t);
            // squares all elements of t
            t.Mul(t, t);

            // centering matrix G
            CvMat g = CenteringMatrix(size);
            // calculates inner product matrix B
            CvMat b = g * t * g.T() * -0.5;
            // calculates eigenvalues and eigenvectors of B
            CvMat vectors = new CvMat(size, size, MatrixType.F64C1);
            CvMat values  = new CvMat(size, 1, MatrixType.F64C1);

            Cv.EigenVV(b, vectors, values);

            // clamp negative eigenvalues to zero so Math.Sqrt below stays real
            for (int r = 0; r < values.Rows; r++)
            {
                if (values[r] < 0)
                {
                    values[r] = 0;
                }
            }

            // multiplies sqrt(eigenvalue) by eigenvector
            // NOTE(review): assumes EigenVV stores eigenvectors as rows ordered by
            // descending eigenvalue, so rows 0-1 give the 2-D embedding — confirm.
            CvMat result = vectors.GetRows(0, 2);

            for (int r = 0; r < result.Rows; r++)
            {
                for (int c = 0; c < result.Cols; c++)
                {
                    result[r, c] *= Math.Sqrt(values[r]);
                }
            }

            // scaling
            Cv.Normalize(result, result, 0, 800, NormType.MinMax);

            //Console.WriteLine(vectors);
            //Console.WriteLine(values);
            //Console.WriteLine(result);

            // opens a window
            using (IplImage img = new IplImage(800, 600, BitDepth.U8, 3))
                using (CvFont font = new CvFont(FontFace.HersheySimplex, 0.5f, 0.5f))
                    using (CvWindow window = new CvWindow("City Location Estimation"))
                    {
                        img.Zero();
                        // draw each city as a dot with its name beside it
                        for (int c = 0; c < size; c++)
                        {
                            double x = result[0, c];
                            double y = result[1, c];
                            // shrink and offset so points stay inside the window
                            x = x * 0.7 + img.Width * 0.1;
                            y = y * 0.7 + img.Height * 0.1;
                            img.Circle((int)x, (int)y, 5, CvColor.Red, -1);
                            img.PutText(CityNames[c], new CvPoint((int)x + 5, (int)y + 10), font, CvColor.White);
                        }
                        window.Image = img;
                        Cv.WaitKey();
                    }
        }
Esempio n. 42
0
        /// <summary>
        /// Sample: computes optical flow between two images by block matching
        /// (cvCalcOpticalFlowBM) and draws the flow vectors.
        /// </summary>
        public OpticalFlowBM()
        {
            // cvCalcOpticalFlowBM
            // Optical flow by block matching

            const int BlockSize = 16;
            const int ShiftSize = 8;
            const int Range     = 32;

            CvSize blockSize = new CvSize(BlockSize, BlockSize);
            CvSize shiftSize = new CvSize(ShiftSize, ShiftSize);
            CvSize maxRange  = new CvSize(Range, Range);

            using (IplImage srcPrev = Cv.LoadImage(Const.ImagePenguin1, LoadMode.GrayScale))
                using (IplImage srcCurr = Cv.LoadImage(Const.ImagePenguin2, LoadMode.GrayScale))
                    using (IplImage dst = Cv.LoadImage(Const.ImagePenguin2, LoadMode.Color))
                    {
                        // (1) Allocate the matrices that receive the velocity vectors
                        CvSize velSize = new CvSize
                        {
                            Width  = (srcPrev.Width - blockSize.Width + shiftSize.Width) / shiftSize.Width,
                            Height = (srcPrev.Height - blockSize.Height + shiftSize.Height) / shiftSize.Height
                        };
                        using (CvMat velx = Cv.CreateMat(velSize.Height, velSize.Width, MatrixType.F32C1))
                            using (CvMat vely = Cv.CreateMat(velSize.Height, velSize.Width, MatrixType.F32C1))
                            {
                                /*if (!CV_ARE_SIZES_EQ(srcA, srcB) ||
                                *   !CV_ARE_SIZES_EQ(velx, vely) ||
                                *   velx->width != velSize.width ||
                                *   vely->height != velSize.height)
                                *   CV_Error(CV_StsUnmatchedSizes, "");*/
                                // FIX: throw InvalidOperationException with a message instead of
                                // a bare Exception, so failures are identifiable
                                if (srcPrev.Size != srcCurr.Size)
                                {
                                    throw new InvalidOperationException("input images must be the same size");
                                }
                                if (velx.Width != vely.Width)
                                {
                                    throw new InvalidOperationException("velx and vely must have the same width");
                                }
                                if (velx.Height != vely.Height)
                                {
                                    throw new InvalidOperationException("velx and vely must have the same height");
                                }
                                if (velx.Cols != velSize.Width)
                                {
                                    throw new InvalidOperationException("velx has an unexpected number of columns");
                                }
                                if (vely.Rows != velSize.Height)
                                {
                                    throw new InvalidOperationException("vely has an unexpected number of rows");
                                }

                                Cv.SetZero(velx);
                                Cv.SetZero(vely);
                                // (2) Compute the optical flow
                                Cv.CalcOpticalFlowBM(srcPrev, srcCurr, blockSize, shiftSize, maxRange, false, velx, vely);
                                // (3) Draw the computed flow vectors
                                for (int r = 0; r < velx.Rows; r++)
                                {
                                    for (int c = 0; c < vely.Cols; c++)
                                    {
                                        int dx = (int)Cv.GetReal2D(velx, r, c);
                                        int dy = (int)Cv.GetReal2D(vely, r, c);
                                        //Console.WriteLine("i:{0} j:{1} dx:{2} dy:{3}", i, j, dx, dy);
                                        if (dx != 0 || dy != 0)
                                        {
                                            CvPoint p1 = new CvPoint(c * ShiftSize, r * ShiftSize);
                                            CvPoint p2 = new CvPoint(c * ShiftSize + dx, r * ShiftSize + dy);
                                            Cv.Line(dst, p1, p2, CvColor.Red, 1, LineType.AntiAlias, 0);
                                        }
                                    }
                                }

                                using (CvWindow windowPrev = new CvWindow("prev", srcPrev))
                                    using (CvWindow windowCurr = new CvWindow("curr", srcCurr))
                                        using (CvWindow windowDst = new CvWindow("dst", dst))
                                        //using (CvWindow windowVelX = new CvWindow("velx", velx))
                                        //using (CvWindow windowVelY = new CvWindow("vely", vely))
                                        {
                                            Cv.WaitKey(0);
                                        }
                            }
                    }
        }
Esempio n. 43
0
 /// <summary>
 /// Creates an instance from a Bitmap.
 /// </summary>
 /// <param name="bitmap">Source bitmap to convert and preprocess.</param>
 public _7SegMatrix(Bitmap bitmap)
 {
     // Convert to IplImage, then build the grayscale and binary images
     mImageFull = BitmapConverter.ToIplImage(bitmap);
     convertFullToGray();
     convertGrayToBin();
 }
        // Runs an interactive self-test: steps through each channel, flip and
        // clipboard feature, asking the user to confirm between steps. Any "No"
        // answer aborts the sequence and restores the preview image.
        // FIX: the original was a 6-level nested if/else pyramid with the same
        // preview-reset statement duplicated in every else branch; flattened to
        // guard clauses with local helpers. Behavior is unchanged.
        void TestMode()
        {
            // Restore the preview box to the stock test image.
            void ResetPreview() => previewbox.BackgroundImage = ImageViewer.Properties.Resources.TestModeImage;

            // Ask a Yes/No question; returns true unless the user answered No.
            bool Confirm(string msg) =>
                MessageBox.Show(msg, "ImageViewer", MessageBoxButtons.YesNo, MessageBoxIcon.Information, MessageBoxDefaultButton.Button1)
                    != System.Windows.Forms.DialogResult.No;

            if (changed == 1)
            {
                changed = 0;
            }

            SaveMenu.Enabled = true;
            IplImage testimage = cvlib.ToIplImage(ImageViewer.Properties.Resources.TestModeImage, false);

            newImage = normalImage = changeChannelImage = testimage;
            ResetPreview();

            if (!Confirm("Test Red Channel, Continue ?"))
            {
                ResetPreview();
                return;
            }
            changechannel(2);
            changed = 0;

            if (!Confirm("Test Green Channel, Continue ?"))
            {
                ResetPreview();
                return;
            }
            // Start each channel test from a fresh copy of the test image
            newImage = cvlib.ToIplImage(ImageViewer.Properties.Resources.TestModeImage, false);
            changechannel(1);
            changed = 0;

            if (!Confirm("Test Blue Channel, Continue ?"))
            {
                ResetPreview();
                return;
            }
            newImage = cvlib.ToIplImage(ImageViewer.Properties.Resources.TestModeImage, false);
            changechannel(0);
            changed = 0;

            if (!Confirm("Test Grayscale Channel, Continue ?"))
            {
                ResetPreview();
                return;
            }
            GrayscaleChannelMenu.PerformClick();

            if (!Confirm("Test Horizontal Flip, Continue ?"))
            {
                ResetPreview();
                return;
            }
            //To Flip a new version of the test image

            /*
             * IplImage testimage5 = cvlib.ToIplImage(ImageViewer.Properties.Resources.TestModeImage, false);
             * newimage = testimage5;
             */

            FlipImage(1);

            if (!Confirm("Test Vertical Flip, Continue ?"))
            {
                ResetPreview();
                return;
            }
            FlipImage(0);

            // The final step uses OK/Cancel buttons instead of Yes/No
            DialogResult result8 = MessageBox.Show("At last Test Image Copy to the Clipboard, Continue ?", "ImageViewer", MessageBoxButtons.OKCancel, MessageBoxIcon.Information, MessageBoxDefaultButton.Button1);
            if (result8 == System.Windows.Forms.DialogResult.OK)
            {
                CopyImageMenu.PerformClick();
            }
            else
            {
                ResetPreview();
            }
        }
Esempio n. 45
0
 /// <summary>
 /// Converts the binary image (8 bit) into a 7-segment image (8 bit).
 /// mImageBin --> mImage7Seg, m7SegPattern
 /// </summary>
 /// <param name="threshold">Threshold (0-100)</param>
 private void convertBinTo7Seg(int threshold)
 {
     // Start from a zeroed clone so mImage7Seg matches mImageBin's size and depth
     mImage7Seg = mImageBin.Clone();
     mImage7Seg.Zero();
     // Match the binary image against the 7-segment templates and keep the pattern
     m7SegPattern = match7SegMatrix(mImageBin, mImage7Seg, threshold);
 }
Esempio n. 46
0
 //---------------------------------------------------------
 // Function : getCameraImage
 // Purpose  : Grabs one frame from the camera
 // Args     : none
 // Returns  : img / camera image
 //---------------------------------------------------------
 public IplImage getCameraImage()
 {
     // Read a frame into the Mat buffer, then convert it to IplImage
     video.Read(m_img);
     i_img = convertMatToIplImage(m_img);
     return(i_img);
 }
Esempio n. 47
0
 /// <summary>
 /// Creates a plate from the given image.
 /// </summary>
 /// <param name="pImage">Source image.</param>
 /// <param name="isCopy">true to store a defensive clone; false to store the reference itself.</param>
 public Plate(IplImage pImage, Boolean isCopy)
 {
     // FIX: isCopy was previously ignored and the image was always cloned.
     // Honor the flag: clone only when the caller asked for a copy.
     image = isCopy ? pImage.Clone() : pImage;
 }
Esempio n. 48
0
        byte[] m7SegPattern = null;                                     // 7-segment pattern array

        /// <summary>
        /// Creates an instance from an image file name.
        /// </summary>
        /// <param name="fileName">Path of the image file to load.</param>
        public _7SegMatrix(string fileName)
        {
            // Load the image, then build the grayscale and binary images
            mImageFull = new IplImage(fileName);
            convertFullToGray();
            convertGrayToBin();
        }
Esempio n. 49
0
        /// <summary>
        /// Searches an eye-pair/face region for a left and a right eye using Haar cascades,
        /// retrying with progressively looser detection parameters until both are found
        /// or all parameter sets are exhausted.
        /// </summary>
        /// <param name="img">Image to search; its ROI is modified during detection.</param>
        /// <param name="storage">Scratch storage for the Haar detector; cleared before each pass.</param>
        /// <param name="region">Region believed to contain the eye pair.</param>
        /// <returns>Zero, one, or two detected eye rectangles (in full-image coordinates).</returns>
        private List <ObjRect> DetectEyesInRegion(IplImage img, CvMemStorage storage, CvRect region)
        {
            List <ObjRect> eyes = new List <ObjRect>();

            //Split the region into two overlapping rectangles
            CvRect leftEye = region;

            leftEye.Width = (int)(leftEye.Width * 0.6);

            CvRect rightEye = region;

            rightEye.Width = (int)(rightEye.Width * 0.6);
            rightEye.X    += (int)(region.Width * 0.4);

            //If the eye pair or face is small enough, use 3 instead of 5
            int    minEyeLength = region.Width < 80 ? 3 : 5;
            CvSize minEyeSize   = new CvSize(minEyeLength, minEyeLength);

            // Parameter sets tried in order. Per entry: [0] cascade selector
            // (0 = side-specific cascade, otherwise generic "Eye"), [1] value passed
            // as the detector's neighbor parameter, [2] fraction of the region height
            // to search (the band is shifted down by half the removed height).
            List <object[]> vars = new List <object[]>();

            vars.Add(new object[] { 0, 3, 0.5f });
            vars.Add(new object[] { 0, 3, 0.7f });
            vars.Add(new object[] { 0, 3, 1.0f });
            vars.Add(new object[] { 0, 2, 0.5f });
            vars.Add(new object[] { 0, 2, 0.7f });
            vars.Add(new object[] { 0, 2, 1.0f });
            vars.Add(new object[] { 0, 1, 0.5f });
            vars.Add(new object[] { 0, 1, 0.7f });
            // NOTE(review): { 1, 1, 1.0f } appears twice (here and three lines below);
            // given the surrounding pattern this first entry may have been meant to be
            // { 0, 1, 1.0f } — confirm against the original detector tuning.
            vars.Add(new object[] { 1, 1, 1.0f });
            vars.Add(new object[] { 1, 1, 0.5f });
            vars.Add(new object[] { 1, 1, 0.7f });
            vars.Add(new object[] { 1, 1, 1.0f });

            bool foundLeft = false, foundRight = false;

            foreach (object[] vals in vars)
            {
                // Shrink the search bands vertically by the fraction in vals[2].
                CvRect left = leftEye;
                left.Y     += (int)((float)left.Height * (float)vals[2] / 2.0);
                left.Height = (int)((float)left.Height * (float)vals[2]);
                CvRect right = rightEye;
                right.Height = left.Height;
                right.Y      = left.Y;

                if (!foundLeft)
                {
                    //Search for eyes
                    storage.Clear();
                    // NOTE(review): the ROI set here is not reset within this method —
                    // presumably a caller resets it; confirm.
                    img.SetROI(left);
                    CvAvgComp[] leyes = BorrowCascade((int)vals[0] == 0 ? ("RightEyeCascade") : ("Eye"), c => Cv.HaarDetectObjects(img, c, storage, 1.0850, (int)vals[1], 0, minEyeSize, new CvSize(0, 0)).ToArrayAndDispose());
                    //Array.Sort<CvAvgComp>(leyes, CompareByNeighbors);

                    if (leyes.Length > 0)
                    {
                        // Translate the hit back to full-image coordinates and tighten
                        // the minimum size for subsequent passes.
                        eyes.Add(new ObjRect(leyes[0].Rect.Offset(left.Location).ToRectangleF(), FeatureType.Eye));
                        minEyeSize = new CvSize(leyes[0].Rect.Width / 4, leyes[0].Rect.Width / 4);
                        foundLeft  = true;
                    }
                }

                if (!foundRight)
                {
                    storage.Clear();
                    img.SetROI(right);
                    CvAvgComp[] reyes = BorrowCascade((int)vals[0] == 0 ? ("LeftEyeCascade") : ("Eye"), c => Cv.HaarDetectObjects(img, c, storage, 1.0850, (int)vals[1], 0, minEyeSize, new CvSize(0, 0)).ToArrayAndDispose());
                    //Array.Sort<CvAvgComp>(reyes, CompareByNeighbors);

                    if (reyes.Length > 0)
                    {
                        eyes.Add(new ObjRect(reyes[0].Rect.Offset(right.Location).ToRectangleF(), FeatureType.Eye));
                        minEyeSize = new CvSize(reyes[0].Rect.Width / 4, reyes[0].Rect.Width / 4);
                        foundRight = true;
                    }
                }
                if (foundLeft && foundRight)
                {
                    break;
                }
            }
            return(eyes);
        }
Esempio n. 50
0
 /// <summary>
 /// Converts the 8-bit grayscale image to an 8-bit binary image
 /// (mImageGray --> mImageBin). The explicit threshold argument (0) is
 /// superseded by ThresholdType.Otsu, which computes the threshold
 /// automatically from the image histogram.
 /// </summary>
 private void convertGrayToBin()
 {
     mImageBin = mImageGray.Clone();  // allocate a destination with matching size/depth
     Cv.Threshold(mImageGray, mImageBin, 0, 255, ThresholdType.Otsu);
 }
Esempio n. 51
0
        /*
        #region GetContour
        /// <summary>
        /// Get the contour of a blob.
        /// Uses Theo Pavlidis' algorithm (see http://www.imageprocessingplace.com/downloads_V3/root_downloads/tutorials/contour_tracing_Abeer_George_Ghuneim/theo.html ).
        /// </summary>
        /// <param name="img">Label image.</param>
        /// <returns>Chain code contour.</returns>
        public CvContourChainCode GetContour(IplImage img)
        {
            return CvBlobLib.GetContour(this, img);
        }
        #endregion
        //*/
        #region MeanColor
        /// <summary>
        /// Computes the average color of this blob over the given image.
        /// </summary>
        /// <param name="imgLabel">Label image identifying which pixels belong to the blob.</param>
        /// <param name="img">Source image the colors are sampled from.</param>
        /// <returns>The blob's mean color.</returns>
        /// <exception cref="ObjectDisposedException">The blob has already been disposed.</exception>
        public CvScalar MeanColor(IplImage imgLabel, IplImage img)
        {
            if (IsDisposed)
            {
                throw new ObjectDisposedException("CvBlob");
            }

            CvScalar mean = CvBlobLib.BlobMeanColor(this, imgLabel, img);
            return mean;
        }
        /// <summary>
        /// Uploads the pixel data of an <see cref="IplImage"/> into an existing OpenGL 2D texture.
        /// </summary>
        /// <param name="texture">Handle of the OpenGL texture to update.</param>
        /// <param name="internalFormat">Internal storage format requested for the texture.</param>
        /// <param name="image">Source image supplying the pixel data.</param>
        /// <exception cref="ArgumentNullException"><paramref name="image"/> is null.</exception>
        /// <exception cref="ArgumentException">The image's channel count or bit depth is unsupported.</exception>
        public static void UpdateTexture(int texture, PixelInternalFormat internalFormat, IplImage image)
        {
            if (image == null)
            {
                throw new ArgumentNullException("image");
            }

            // Map the channel count to the GL pixel layout (color data is in BGR/BGRA order).
            PixelFormat uploadFormat;
            switch (image.Channels)
            {
            case 1: uploadFormat = PixelFormat.Luminance; break;

            case 2: uploadFormat = PixelFormat.Rg; break;

            case 3: uploadFormat = PixelFormat.Bgr; break;

            case 4: uploadFormat = PixelFormat.Bgra; break;

            default: throw new ArgumentException("Image has an unsupported number of channels.", "image");
            }

            // Map the IplImage depth to the GL component type and its size in bytes.
            int       bytesPerComponent;
            PixelType componentType;

            switch (image.Depth)
            {
            case IplDepth.U8:  bytesPerComponent = 1; componentType = PixelType.UnsignedByte;  break;

            case IplDepth.S8:  bytesPerComponent = 1; componentType = PixelType.Byte;          break;

            case IplDepth.U16: bytesPerComponent = 2; componentType = PixelType.UnsignedShort; break;

            case IplDepth.S16: bytesPerComponent = 2; componentType = PixelType.Short;         break;

            case IplDepth.S32: bytesPerComponent = 4; componentType = PixelType.Int;           break;

            case IplDepth.F32: bytesPerComponent = 4; componentType = PixelType.Float;         break;

            default: throw new ArgumentException("Image has an unsupported pixel bit depth.", "image");
            }

            GL.BindTexture(TextureTarget.Texture2D, texture);
            // Image rows may carry padding (WidthStep), so tell GL the actual unpack
            // alignment and the true row length in pixels before uploading.
            GL.PixelStore(PixelStoreParameter.UnpackAlignment, image.WidthStep % 4 == 0 ? 4 : 1);
            GL.PixelStore(PixelStoreParameter.UnpackRowLength, image.WidthStep / (bytesPerComponent * image.Channels));
            GL.TexImage2D(TextureTarget.Texture2D, 0, internalFormat, image.Width, image.Height, 0, uploadFormat, componentType, image.ImageData);
            GC.KeepAlive(image);  // keep the managed wrapper (and its pixel buffer) alive across the native upload
        }
Esempio n. 53
0
        /// <summary>
        /// 指定したIplImageを表示する
        /// </summary>
        /// <param name="image"></param>
#else
        /// <summary>
        /// Shows the image in this window
        /// </summary>
        /// <param name="image">Image to be shown. </param>
#endif
        public void ShowImage(IplImage image)
        {
            // Display is delegated entirely to the Image property setter
            // (defined elsewhere in this class).
            Image = image;
        }
Esempio n. 54
0
        /// <summary>
        /// Builds a converter from a Spinnaker <see cref="IManagedImage"/> to an IplImage
        /// for the given camera pixel format. Formats that already match an IplImage layout
        /// are copied directly; packed 12-bit mono and Bayer formats are converted through
        /// the Spinnaker conversion pipeline.
        /// </summary>
        /// <param name="pixelFormat">Pixel format reported by the camera.</param>
        /// <param name="colorProcessing">Demosaicing algorithm for Bayer formats.</param>
        /// <returns>A function producing a newly allocated IplImage from each frame.</returns>
        /// <exception cref="InvalidOperationException">The pixel format cannot be converted.</exception>
        static Func <IManagedImage, IplImage> GetConverter(PixelFormatEnums pixelFormat, ColorProcessingAlgorithm colorProcessing)
        {
            int      outputChannels;
            IplDepth outputDepth;

            // Direct-copy path: plain mono formats, BGR8, or raw Bayer data when
            // no color processing was requested (the Bayer mosaic is kept as 1 channel).
            if (pixelFormat < PixelFormatEnums.BayerGR8 || pixelFormat == PixelFormatEnums.BGR8 ||
                pixelFormat <= PixelFormatEnums.BayerBG16 && colorProcessing == ColorProcessingAlgorithm.NoColorProcessing)
            {
                if (pixelFormat == PixelFormatEnums.BGR8)
                {
                    outputChannels = 3;
                    outputDepth    = IplDepth.U8;
                }
                else
                {
                    outputChannels = 1;
                    // Derive the bit depth from the enum's numeric value.
                    // NOTE(review): this relies on the ordering/spacing of PixelFormatEnums
                    // members — confirm against the SpinnakerNET enum definition.
                    var depthFactor = (int)pixelFormat;
                    if (pixelFormat > PixelFormatEnums.Mono16)
                    {
                        depthFactor = (depthFactor - 3) / 4;
                    }
                    outputDepth = (IplDepth)(8 * (depthFactor + 1));
                }

                return(image =>
                {
                    var width = (int)image.Width;
                    var height = (int)image.Height;
                    // Wrap the camera buffer in a zero-copy IplImage header, then
                    // copy into an image the caller owns.
                    using (var bitmapHeader = new IplImage(new Size(width, height), outputDepth, outputChannels, image.DataPtr))
                    {
                        var output = new IplImage(bitmapHeader.Size, outputDepth, outputChannels);
                        CV.Copy(bitmapHeader, output);
                        return output;
                    }
                });
            }

            PixelFormatEnums outputFormat;

            // Conversion path: packed 12-bit mono is unpacked to 16-bit,
            // Bayer formats are demosaiced to BGR8.
            if (pixelFormat == PixelFormatEnums.Mono12p ||
                pixelFormat == PixelFormatEnums.Mono12Packed)
            {
                outputFormat   = PixelFormatEnums.Mono16;
                outputDepth    = IplDepth.U16;
                outputChannels = 1;
            }
            else if (pixelFormat >= PixelFormatEnums.BayerGR8 && pixelFormat <= PixelFormatEnums.BayerBG16)
            {
                outputFormat   = PixelFormatEnums.BGR8;
                outputDepth    = IplDepth.U8;
                outputChannels = 3;
            }
            else
            {
                throw new InvalidOperationException(string.Format("Unable to convert pixel format {0}.", pixelFormat));
            }

            return(image =>
            {
                var width = (int)image.Width;
                var height = (int)image.Height;
                var output = new IplImage(new Size(width, height), outputDepth, outputChannels);
                unsafe
                {
                    // Let Spinnaker convert directly into the IplImage's pixel buffer.
                    using (var destination = new ManagedImage((uint)width, (uint)height, 0, 0, outputFormat, output.ImageData.ToPointer()))
                    {
                        image.Convert(destination, outputFormat, (SpinnakerNET.ColorProcessingAlgorithm)colorProcessing);
                        return output;
                    }
                }
            });
        }
Esempio n. 55
0
        /// <summary>
        /// 初期化
        /// </summary>
        /// <param name="image"></param>
        /// <param name="sizeMode"></param>
#else
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="image"></param>
        /// <param name="sizeMode"></param>
#endif
        public CvWindowEx(IplImage image, PictureBoxSizeMode sizeMode)
            : this()
        {
            // After default initialization, show the initial image and apply the
            // requested scaling behavior to the hosting PictureBox.
            Image = image;
            _pictureBox.SizeMode = sizeMode;
        }
Esempio n. 56
0
 /// <summary>
 /// Trims the top and bottom off a plate image using the strongest peak of
 /// its vertical projection graph.
 /// </summary>
 /// <param name="origin">Source plate image.</param>
 /// <param name="graph">Vertical projection graph of <paramref name="origin"/>.</param>
 /// <returns>The horizontal band of <paramref name="origin"/> covered by the best peak.</returns>
 private IplImage CutTopBottom(IplImage origin, PlateVerticalGraph graph)
 {
     graph.ApplyProbabilityDistributor(new Graph.ProbabilityDistributor(0.0, 0.0, 2, 2));
     Graph.Peak bestPeak = graph.FindPeak(3)[0];
     CvRect band = new CvRect(0, bestPeak.Left, origin.Width, bestPeak.GetDiff);
     return GetSubImage(origin, band);
 }
Esempio n. 57
0
 /// <summary>
 /// Updates the tracked object's histogram from the current frame.
 /// </summary>
 /// <param name="curFrame">Current video frame.</param>
 /// <returns>True when the native histogram update succeeds.</returns>
 /// <exception cref="ObjectDisposedException">The tracker has been disposed.</exception>
 /// <exception cref="ArgumentNullException"><paramref name="curFrame"/> is null.</exception>
 public virtual bool UpdateHistogram(IplImage curFrame)
 {
     if (disposed)
     {
         throw new ObjectDisposedException("CvCamShiftTracker");
     }
     if (curFrame == null)
     {
         throw new ArgumentNullException("curFrame");
     }

     // The native call reports success as a non-zero integer.
     int result = NativeMethods.legacy_CvCamShiftTracker_update_histogram(ptr, curFrame.CvPtr);
     return result != 0;
 }
Esempio n. 58
0
 /// <summary>
 /// Creates an empty Plate with no image data attached yet.
 /// </summary>
 public Plate()
 {
     image = null;
     plateCopy = null;
 }
Esempio n. 59
0
 /// <summary>
 /// Renders the matrix as a black/white debug image — every non-zero cell is
 /// drawn white on a black background — and shows it in a window.
 /// </summary>
 public void DebugShow()
 {
     using (IplImage canvas = new IplImage(Cols, Rows, BitDepth.U8, 1))
     {
         canvas.Zero();  // start from an all-black image
         for (int row = 0; row < Rows; row++)
         {
             for (int col = 0; col < Cols; col++)
             {
                 if (Values[row, col] != 0)
                 {
                     canvas[row, col] = 255;
                 }
             }
         }
         CvWindow.ShowImages(canvas);
     }
 }
Esempio n. 60
0
 public Plate(IplImage pImage)
 {
     image           = pImage.Clone();
     plateCopy       = new Plate(image, true);
     plateCopy.image = AdaptiveThresholding(plateCopy.image);
 }