Example #1
    // Update is called once per frame
    void Update()
    {
        if (!_ready)
        {
            return;
        }

        transform.rotation = baseRotation * Quaternion.AngleAxis(webCamTexture.videoRotationAngle, Vector3.up);

        int detectedFaceCount = 0;

        unsafe
        {
            fixed(OpenCVWrapper.CvCircle *outFaces = _faces)
            {
                OpenCVWrapper.Detect(outFaces, _maxFaceDetectCount, ref detectedFaceCount);
            }
        }

        NormalizedFacePositions.Clear();
        for (int i = 0; i < detectedFaceCount; i++)
        {
            NormalizedFacePositions.Add(new Vector2((_faces[i].X * DetectionDownScale) / CameraResolution.x, 1f - ((_faces[i].Y * DetectionDownScale) / CameraResolution.y)));
        }
    }
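
The Update loop above publishes detected faces as normalized viewport coordinates in NormalizedFacePositions. Below is a minimal consumer sketch, assuming the component above is called FaceDetection and exposes NormalizedFacePositions publicly; the class name, the detector field and the marker field are illustrative, not taken from the original project.

    using System.Collections.Generic;
    using UnityEngine;

    // Hypothetical consumer: places a marker over the first detected face.
    public class FaceMarker : MonoBehaviour
    {
        public FaceDetection detector; // assumed name for the detector component above
        public Transform marker;       // object to position over the detected face
        public float depth = 5f;       // distance in front of the camera

        void Update()
        {
            List<Vector2> faces = detector.NormalizedFacePositions;
            if (faces == null || faces.Count == 0)
            {
                return;
            }

            // The positions are normalized (0..1), i.e. viewport coordinates,
            // so they can be mapped back to world space through the camera.
            Vector3 viewport = new Vector3(faces[0].x, faces[0].y, depth);
            marker.position = Camera.main.ViewportToWorldPoint(viewport);
        }
    }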
Example #2
 public void Dispose()
 {
     if (capture != IntPtr.Zero)
     {
         OpenCVWrapper.cvReleaseCapture(ref capture);
     }
 }
Example #3
 private void OnApplicationQuit()
 {
     if (_ready)
     {
         OpenCVWrapper.Close();
     }
 }
Example #4
    // Use this for initialization
    void Start()
    {
        int camWidth = 0, camHeight = 0;

        webCamTexture = new WebCamTexture();
        Renderer renderer = GetComponent<Renderer>();

        renderer.material.mainTexture = webCamTexture;
        baseRotation = transform.rotation;
        webCamTexture.Play();
        camWidth  = webCamTexture.width;
        camHeight = webCamTexture.height;

        int result = OpenCVWrapper.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }
            return;
        }

        CameraResolution        = new Vector2(camWidth, camHeight);
        _faces                  = new OpenCVWrapper.CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List<Vector2>();
        OpenCVWrapper.SetScale(DetectionDownScale);
        _ready = true;
    }
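
Examples 1, 3 and 4 all assume a P/Invoke surface on OpenCVWrapper plus a CvCircle struct. The sketch below shows what those declarations could look like, inferred from the call sites; the native plugin name, the Radius field and the exact parameter types are assumptions, and the unsafe Detect signature requires the project's "Allow unsafe code" setting (which the fixed block in Example 1 already implies).

    using System.Runtime.InteropServices;

    public static class OpenCVWrapper
    {
        // Result struct filled by Detect; X/Y are read in Example 1, Radius is assumed.
        [StructLayout(LayoutKind.Sequential)]
        public struct CvCircle
        {
            public int X, Y, Radius;
        }

        // "OpenCVPlugin" is a placeholder for whatever native library actually exports these.
        [DllImport("OpenCVPlugin")]
        public static extern int Init(ref int camWidth, ref int camHeight);

        [DllImport("OpenCVPlugin")]
        public static extern void SetScale(float downScale);

        [DllImport("OpenCVPlugin")]
        public static unsafe extern void Detect(CvCircle* outFaces, int maxOutFacesCount,
                                                ref int outDetectedFacesCount);

        [DllImport("OpenCVPlugin")]
        public static extern void Close();
    }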
Example #5
        static void Main(string[] args)
        {
            while (true)
            {
                Console.WriteLine("Press f for face recognition\nPress t for object tracking\nPress r for text recognition\nPress h for hand recognition\nPress q for QR Code\nPress e for exit");
                var key = Console.ReadKey();
                Console.WriteLine();

                try
                {
                    if (key.Key == ConsoleKey.F)
                    {
                        Console.WriteLine("Enter Image Path: ");
                        var path = Console.ReadLine();
                        Console.WriteLine($"Found faces: {OpenCVWrapper.RecognizeFaces(path)}");
                        var result = path == null ? $"{Directory.GetCurrentDirectory()}/result.png" : $"{path}result.{path.Split('.').Last()}";
                        Console.WriteLine($"Resulting image saved as {result}");
                    }
                    else if (key.Key == ConsoleKey.R)
                    {
                        Console.WriteLine("Enter Image Path: ");
                        var path = Console.ReadLine();
                        Console.WriteLine($"Text:\n {OpenCVWrapper.ReadText(path)}");
                    }
                    else if (key.Key == ConsoleKey.T)
                    {
                        OpenCVWrapper.TrackObject();
                    }
                    else if (key.Key == ConsoleKey.H)
                    {
                        OpenCVWrapper.DetectHand();
                    }
                    else if (key.Key == ConsoleKey.Q)
                    {
                        Console.WriteLine("Enter Image Path: ");
                        var path = Console.ReadLine();
                        Console.WriteLine($"Result:\n {OpenCVWrapper.DecodeQR(path)}");
                    }
                    else if (key.Key == ConsoleKey.E)
                    {
                        break;
                    }
                    Console.WriteLine("Press any key");
                    Console.ReadKey();
                }
                catch (Exception ex)
                {
                    Console.WriteLine($"Error: {ex.Message}");
                    Console.WriteLine("Press any key");
                    Console.ReadKey();
                }
            }
        }
Example #6
        public ImageMatching()
        {
            InitializeComponent();

            opencvwrapper = new OpenCVWrapper();

            thinningopencvwrapper = new ThinningOpenCVWrapper();

            ProcessedImagePictureEdit.ContextMenu = new ContextMenu();

            //ArduinoSerialPort.PortName = "COM4";

            //ArduinoSerialPort.BaudRate = 9600;
        }
Example #7
        public void GetImageTexture(int[] returnImage, ref IntPtr imagePtr)
        {
            if (OpenCVWrapper.cvGrabFrame(capture) != 0)
            {
                failureCount = 0;

                IntPtr ptr = OpenCVWrapper.cvRetrieveFrame(capture);

                OpenCVWrapper.IplImage videoImage = (OpenCVWrapper.IplImage)Marshal.PtrToStructure(ptr,
                                                                                                   typeof(OpenCVWrapper.IplImage));

                bool replaceBackground = false;
                if (imageReadyCallback != null)
                {
                    replaceBackground = imageReadyCallback(ptr, returnImage);
                }

                if (!replaceBackground && (returnImage != null))
                {
                    unsafe
                    {
                        byte *src   = (byte *)videoImage.imageData;
                        int   index = 0;
                        for (int i = 0; i < videoImage.height; i++)
                        {
                            for (int j = 0; j < videoImage.width * videoImage.nChannels; j += videoImage.nChannels)
                            {
                                returnImage[index++] = (int)((*(src) << 16) | (*(src + 1) << 8) | *(src + 2));
                                src += videoImage.nChannels;
                            }
                        }
                    }
                }

                if (imagePtr != IntPtr.Zero)
                {
                    imagePtr = videoImage.imageData;
                }
            }
            else
            {
                failureCount++;

                if (failureCount > FAILURE_THRESHOLD)
                {
                    throw new GoblinException("Video capture device ID: is used by " +
                                              "other application, and can not be accessed");
                }
            }
        }
Example #8
        public void GetPaperFromImageTest()
        {
            foreach (string image in Images)
            {
                //Gets rectangles
                Point[][] rectangles = OpenCVWrapper.IdentifyRectangles($"Images//{image}.jpg");
                //Checks valid json was output
                Assert.IsNotNull(rectangles);
                //Checks at least 1 rectangle was identified
                Assert.AreNotEqual(rectangles.Length, 0);

                Bitmap bitmap = new Bitmap($"Images//{image}.jpg");
                //Get rectangle that corresponds to the paper (I hope)
                Point[] paper = bitmap.IdentifyPaperCorners(rectangles);
                Assert.IsNotNull(paper);

                //Draws all points on the image for manual checking
                Bitmap tempBitmap = new Bitmap(bitmap);
                foreach (Point corner in paper)
                {
                    for (int xOff = -2; xOff < 3; xOff++)
                    {
                        for (int yOff = -2; yOff < 3; yOff++)
                        {
                            if (corner.X + xOff < tempBitmap.Width && corner.Y + yOff < tempBitmap.Height &&
                                corner.X + xOff >= 0 &&
                                corner.Y + yOff >= 0)
                            {
                                tempBitmap.SetPixel(corner.X + xOff, corner.Y + yOff, Color.Red);
                            }
                        }
                    }
                }

                //Save modified image
                if (!Directory.Exists($"Images/Out/{image}"))
                {
                    Directory.CreateDirectory($"Images/Out/{image}");
                }
                tempBitmap.Save($"Images/Out/{image}/1 corners.png", ImageFormat.Png);

                //Transforms and saves image for manual checking
                bitmap = bitmap.PerspectiveTransformImage(paper, 1414, 1000);
                bitmap.Save($"Images/Out/{image}/2 transform.png", ImageFormat.Png);
            }
        }
Example #9
        public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution,
                                     ImageFormat format, bool grayscale)
        {
            if (cameraInitialized)
            {
                return;
            }

            this.resolution    = resolution;
            this.grayscale     = grayscale;
            this.frameRate     = framerate;
            this.videoDeviceID = videoDeviceID;
            this.format        = format;

            switch (resolution)
            {
            case Resolution._160x120:
                cameraWidth  = 160;
                cameraHeight = 120;
                break;

            case Resolution._320x240:
                cameraWidth  = 320;
                cameraHeight = 240;
                break;

            case Resolution._640x480:
                cameraWidth  = 640;
                cameraHeight = 480;
                break;

            case Resolution._800x600:
                cameraWidth  = 800;
                cameraHeight = 600;
                break;

            case Resolution._1024x768:
                cameraWidth  = 1024;
                cameraHeight = 768;
                break;

            case Resolution._1280x1024:
                cameraWidth  = 1280;
                cameraHeight = 1024;
                break;

            case Resolution._1600x1200:
                cameraWidth  = 1600;
                cameraHeight = 1200;
                break;
            }

            capture = OpenCVWrapper.cvCaptureFromCAM(videoDeviceID);

            if (capture == IntPtr.Zero)
            {
                throw new GoblinException("VideoDeviceID " + videoDeviceID + " is out of the range.");
            }

            OpenCVWrapper.cvSetCaptureProperty(capture, OpenCVWrapper.CV_CAP_PROP_FRAME_WIDTH, cameraWidth);
            OpenCVWrapper.cvSetCaptureProperty(capture, OpenCVWrapper.CV_CAP_PROP_FRAME_HEIGHT, cameraHeight);

            double frame_rate = 0;

            switch (frameRate)
            {
            case FrameRate._15Hz: frame_rate = 15; break;

            case FrameRate._30Hz: frame_rate = 30; break;

            case FrameRate._50Hz: frame_rate = 50; break;

            case FrameRate._60Hz: frame_rate = 60; break;

            case FrameRate._120Hz: frame_rate = 120; break;

            case FrameRate._240Hz: frame_rate = 240; break;
            }

            OpenCVWrapper.cvSetCaptureProperty(capture, OpenCVWrapper.CV_CAP_PROP_FPS, frame_rate);

            // Grab the video image to see if resolution is correct
            if (OpenCVWrapper.cvGrabFrame(capture) != 0)
            {
                IntPtr ptr = OpenCVWrapper.cvRetrieveFrame(capture);

                OpenCVWrapper.IplImage videoImage = (OpenCVWrapper.IplImage)Marshal.PtrToStructure(ptr,
                                                                                                   typeof(OpenCVWrapper.IplImage));

                if (videoImage.width != cameraWidth || videoImage.height != cameraHeight)
                {
                    throw new GoblinException("Resolution " + cameraWidth + "x" + cameraHeight +
                                              " is not supported");
                }
            }

            cameraInitialized = true;
        }
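
Examples 2, 7 and 9 go through the legacy OpenCV 1.x C API. Below is a sketch of the P/Invoke declarations they presuppose; the DLL name is only a placeholder that depends on the OpenCV build being wrapped, and the IplImage struct (not repeated here) would have to mirror the full native header layout for Marshal.PtrToStructure to work.

        using System;
        using System.Runtime.InteropServices;

        public static class OpenCVWrapper
        {
            // Standard OpenCV capture property ids.
            public const int CV_CAP_PROP_FRAME_WIDTH  = 3;
            public const int CV_CAP_PROP_FRAME_HEIGHT = 4;
            public const int CV_CAP_PROP_FPS          = 5;

            // In the C headers cvCaptureFromCAM is a macro over cvCreateCameraCapture,
            // so the import maps the managed name onto that export. "highgui" is a
            // placeholder; the actual DLL name varies with the OpenCV version.
            [DllImport("highgui", EntryPoint = "cvCreateCameraCapture")]
            public static extern IntPtr cvCaptureFromCAM(int index);          // returns CvCapture*

            [DllImport("highgui")]
            public static extern int cvGrabFrame(IntPtr capture);             // 0 on failure

            [DllImport("highgui")]
            public static extern IntPtr cvRetrieveFrame(IntPtr capture);      // returns IplImage*

            [DllImport("highgui")]
            public static extern int cvSetCaptureProperty(IntPtr capture, int propertyId, double value);

            [DllImport("highgui")]
            public static extern void cvReleaseCapture(ref IntPtr capture);   // takes CvCapture**, zeroes the pointer
        }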
Example #10
        private void SetupCaptureDevices()
        {
            // Create our video capture device that uses OpenCV library.
            OpenCVCapture captureDevice = new OpenCVCapture();
            captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
                ImageFormat.R8G8B8_24, false);

            // Create a 16bit color texture that contains the image processed by OpenCV
            // We're using alpha gray texture because we want the black color to represent
            // transparent color so that we can overlay the texture properly on top of the live
            // video image
            overlayTexture = new Texture2D(GraphicsDevice, captureDevice.Width, captureDevice.Height,
                false, SurfaceFormat.Bgra4444);
            // Create an array that will contain the image data of overlayTexture
            overlayData = new short[overlayTexture.Width * overlayTexture.Height];

            // Assigns a callback function to be called whenever a new video frame is captured
            captureDevice.CaptureCallback = delegate(IntPtr image, int[] background)
            {
                // Creates a holder for an OpenCV image
                IntPtr grayImage = OpenCVWrapper.cvCreateImage(OpenCVWrapper.cvGetSize(image), 8, 1);

                // Converts the color image (live video image) to a gray image
                OpenCVWrapper.cvCvtColor(image, grayImage, OpenCVWrapper.CV_BGR2GRAY);

                // Performs canny edge detection on the gray image
                OpenCVWrapper.cvCanny(grayImage, grayImage, 10, 200, 3);

                // Converts the gray image pointer to IplImage structure so that we can access
                // the image data of the processed gray image
                OpenCVWrapper.IplImage videoImage = (OpenCVWrapper.IplImage)Marshal.PtrToStructure(grayImage,
                    typeof(OpenCVWrapper.IplImage));

                unsafe
                {
                    int index = 0;
                    // Gets a pointer to the first byte of the image data
                    byte* src = (byte*)videoImage.imageData;
                    // Iterates through the image pointer
                    for (int i = 0; i < videoImage.height; i++)
                    {
                        for (int j = 0; j < videoImage.width; j++)
                        {
                            // src data contains an 8-bit grayscale value, so we need to convert it
                            // to the Bgra4444 format of the overlay texture.
                            // A black pixel packs to zero, so it ends up fully transparent.
                            overlayData[index++] = (short)((*(src) << 8) | (*(src)));
                            src++;
                        }
                    }
                }

                // Resets the texture assigned to the device (needed to work around an XNA 3.1 bug)
                GraphicsDevice.Textures[0] = null;
                // Assigns the image data to the overlay texture
                overlayTexture.SetData<short>(overlayData);

                // Deallocates the memory assigned to the OpenCV image
                OpenCVWrapper.cvReleaseImage(ref grayImage);

                // We don't want to modify the original video image, so we return false
                return false;
            };

            scene.AddVideoCaptureDevice(captureDevice);

            scene.ShowCameraImage = true;
        }
Example #11
        public ImageProcessing()
        {
            InitializeComponent();

            opencvwrapper = new OpenCVWrapper();

            thinningopencvwrapper = new ThinningOpenCVWrapper();

            if (File.Exists(@"D:\Eighth Semester\HandVeinPattern\RuntimeDirectory\ResultImage.jpg"))
            {
                try
                {
                    File.Delete(@"D:\Eighth Semester\HandVeinPattern\RuntimeDirectory\ResultImage.jpg");
                }
                catch (IOException exception)
                {
                    MessageBox.Show(exception.Message);
                }
            }

            if (File.Exists(@"D:\Eighth Semester\HandVeinPattern\RuntimeDirectory\AdaptiveThreshold.jpg"))
            {
                try
                {
                    File.Delete(@"D:\Eighth Semester\HandVeinPattern\RuntimeDirectory\AdaptiveThreshold.jpg");
                }
                catch (IOException exception)
                {
                    MessageBox.Show(exception.Message);
                }
            }

            if (File.Exists(@"D:\Eighth Semester\HandVeinPattern\RuntimeDirectory\MultipliedImage.jpg"))
            {
                try
                {
                    File.Delete(@"D:\Eighth Semester\HandVeinPattern\RuntimeDirectory\MultipliedImage.jpg");
                }
                catch (IOException exception)
                {
                    MessageBox.Show(exception.Message);
                }
            }

            if (File.Exists(@"D:\Eighth Semester\HandVeinPattern\RuntimeDirectory\ThinnedImage.jpg"))
            {
                try
                {
                    File.Delete(@"D:\Eighth Semester\HandVeinPattern\RuntimeDirectory\ThinnedImage.jpg");
                }
                catch (IOException exception)
                {
                    MessageBox.Show(exception.Message);
                }
            }

            if (Directory.Exists(@"D:\Eighth Semester\HandVeinPattern\RuntimeDirectory"))
            {
                try
                {
                    Directory.Delete(@"D:\Eighth Semester\HandVeinPattern\RuntimeDirectory");
                }
                catch (IOException exception)
                {
                    MessageBox.Show(exception.Message);
                }
            }

            if (AutoProcessBarToggleSwitchItem.Checked == true)
            {
                PreProcessingRibbonPageGroup.Enabled      = false;
                ThresholdingRibbonPageGroup.Enabled       = false;
                WrapperProcessingRibbonPageGroup.Enabled  = false;
                PostProcessingRibbonPageGroup.Enabled     = false;
                SegmentationRibbonPageGroup.Enabled       = false;
                FeaturesExtractionRibbonPageGroup.Enabled = false;
                ProcessBarButtonItem.Enabled = true;
            }
            else
            {
                PreProcessingRibbonPageGroup.Enabled      = true;
                ThresholdingRibbonPageGroup.Enabled       = true;
                WrapperProcessingRibbonPageGroup.Enabled  = true;
                PostProcessingRibbonPageGroup.Enabled     = true;
                SegmentationRibbonPageGroup.Enabled       = true;
                FeaturesExtractionRibbonPageGroup.Enabled = true;
                ProcessBarButtonItem.Enabled = false;
            }
        }
Example #12
        public IActionResult Create(IFormCollection form)
        {
            //Start of working directory
            string workingDirectory = Path.Combine(_workingDirectory, "Working ");
            // ReSharper disable once RedundantAssignment, needs to be there for non debug compile
            Bitmap initialImage = null;

#if !DEBUG
            try
            {
#endif

            #region Setup

            //Checks there is a file
            if (form.Files.Count == 0)
            {
                return(new BadRequestResult());
            }

            //Gets the id for this upload, locked so only one thread can enter at a time
            int id = SetupId(ref workingDirectory);
            //Cleans up Working folders that are leftover for some reason on every 20th pass
            if (id % 20 == 0)
            {
                Thread cleanWorkingDirectories = new Thread(CleanWorkingDirectories);
                cleanWorkingDirectories.Start();
            }
            //Saves the file sent and get if transformation is needed
            bool transform = ProcessFormData(form, workingDirectory);
            //Tries to load file sent as image and will return an UnsupportedMediaTypeResult if file can't be loaded to a bitmap
            try
            {
                //Tries to load the image
                initialImage = LoadImage(form, workingDirectory);
            }
            catch (Exception)
            {
                Directory.Delete(workingDirectory, true);
                return(new UnsupportedMediaTypeResult());
            }

            #endregion

            #region Image Manipulation

            //Scales image to be less than a certain number of pixels
            Bitmap scaledImage = ScaleImage(workingDirectory, initialImage, transform);
            Bitmap perspectiveImage;
            //Will only run this if transform flag has been checked
            if (transform)
            {
                //Finds possible rectangles with OpenCV
                Point[][] rectangles =
                    OpenCVWrapper.IdentifyRectangles($"\"{Path.Combine(workingDirectory, "scaled.png")}\"");
                if (rectangles == null || rectangles.Length == 0)
                {
                    initialImage.Dispose();
                    Directory.Delete(workingDirectory, true);
                    return(new StatusCodeResult((int)HttpStatusCode.InternalServerError));
                }

#if DEBUG
                Bitmap temp = Debug.DrawPoints(scaledImage, rectangles);
                temp.Save(Path.Combine(_workingDirectory, "Debug", "3 Rectangles.png"), ImageFormat.Png);
#endif

                //Finds the correct rectangle
                Point[] paper = scaledImage.IdentifyPaperCorners(rectangles);
                if (paper == null || paper.Length != 4)
                {
                    initialImage.Dispose();
                    Directory.Delete(workingDirectory, true);
                    return(new StatusCodeResult((int)HttpStatusCode.InternalServerError));
                }

#if DEBUG
                temp = Debug.DrawPoints(scaledImage, paper);
                temp.Save(Path.Combine(_workingDirectory, "Debug", "4 Paper.png"), ImageFormat.Png);
#endif

                perspectiveImage = TransformImage(scaledImage, paper);
            }
            else
            {
                perspectiveImage = scaledImage;
            }

            #endregion

            #region Color Identification

            //Gets threshold array for image
            bool[][] threshold = CreateThresholds(perspectiveImage);
            //Thins lines
            ThinLines(threshold);

            #endregion

            #region Line Identification

            //Finds lines
            List <List <PointF> > lineParts = threshold.CreateLineParts();
            //Reduces number of points in lines
            lineParts.ReduceLines();
            //Finds loops
            List <List <PointF> > loops = lineParts.CreateLoops();
            //Joins remaining lines
            lineParts.ConnectLines();
            //Create line objects
            List <Line> lines = LineCreation.CreateLineObjects(lineParts, loops, perspectiveImage);

            #endregion

            #region Map Creation

            //Creates a map
            Map map = new Map
            {
                Id    = id,
                Lines = lines,
                Ratio = (double)perspectiveImage.Width / perspectiveImage.Height
            };

            //Converts map to json
            string json = JsonConvert.SerializeObject(map).Replace("\"IsEmpty\":false,", "");

            SaveMap(id, json);

            #endregion

            initialImage.Dispose();
            Directory.Delete(workingDirectory, true);

            //Returns map
            return(new ObjectResult(json)
            {
                //Sets the media type to be json instead of string
                ContentTypes = new MediaTypeCollection
                {
                    "application/json",
                    "charset=utf-8"
                }
            });

#if !DEBUG
        }

        catch
        {
            initialImage?.Dispose();
            if (Directory.Exists(workingDirectory))
            {
                Directory.Delete(workingDirectory, true);
            }
            return(new StatusCodeResult((int)HttpStatusCode.InternalServerError));
        }
#endif
        }