Example #1
 void OnApplicationQuit()
 {
     if (Init.camerasInitialized)
     {
         OpenCVInterop.CloseSDLCameras();
     }
 }
 void OnApplicationQuit()
 {
     if (_ready)
     {
         OpenCVInterop.Close();
     }
 }
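Both handlers assume a static P/Invoke wrapper around the native plugin. A minimal sketch of those bindings, assuming a DLL named "OpenCVPlugin" (the name never appears in these examples; only the entry-point names come from the calls above):

 using System.Runtime.InteropServices;

 internal static class OpenCVInterop
 {
     // "OpenCVPlugin" is a placeholder; substitute the actual native library name.
     [DllImport("OpenCVPlugin")]
     internal static extern void Close();

     [DllImport("OpenCVPlugin")]
     internal static extern void CloseSDLCameras();
 }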
Example #3
    void Awake()
    {
        int result = OpenCVInterop.openCam(ref camWidth, ref camHeight, 0);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }
            return;
        }

        Debug.Log("Camera Resolution is:" + camWidth + " " + camHeight);

        hsvRange.minH            = 90F;
        hsvRange.maxH            = 150F;
        hsvRange.minS            = 0.15F;
        hsvRange.maxS            = 0.25F;
        thresholds.lowThreshold  = 0;
        thresholds.highThreshold = 30;
        handPos.X = 0;
        handPos.Y = 0;

        fingerTips   = new Position[maxFingerTipsCount];
        gestureCache = new GestureCache();

        furthestFingertip.X = 0;
        furthestFingertip.Y = 0;

        CameraResolution = new Vector2(camWidth, camHeight);
        _ready           = true;
    }
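Awake() passes camWidth and camHeight by reference so the native side can report the actual capture resolution, and the last argument selects the capture device. Extending the OpenCVInterop sketch above, a binding consistent with this call might be (parameter names are assumptions):

    // Hypothetical declaration; a negative return value signals failure,
    // with -1 meaning the camera stream could not be opened.
    [DllImport("OpenCVPlugin")]
    internal static extern int openCam(ref int camWidth, ref int camHeight, int deviceIndex);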
Example #4
    // Start is called before the first frame update
    void Start()
    {
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }
            else if (result == -3)
            {
                Debug.LogWarningFormat("[{0}] First Frame Empty.", GetType());
            }
            else if (result == -4)
            {
                Debug.LogWarningFormat("[{0}] No ROI.", GetType());
            }
            return;
        }

        CameraResolution            = new Vector2(camWidth, camHeight);
        _tracking                   = new CvRectangle[_maxTrackingCount];
        NormalizedTrackingPositions = new List <Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
    // Update is called once per frame
    void Update()
    {
        //if no color data is passed, return
        if (PositionAtObjectScreenSpace.pixelPos.Count == 0)
        {
            return;
        }

        //get pixelX and pixelY positions from PositionAtObjectScreenSpace
        x = (int)(PositionAtObjectScreenSpace.pixelPos[0].x);
        y = (int)(PositionAtObjectScreenSpace.pixelPos[0].y);

        Debug.LogWarningFormat("[{0}] (xPix, yPix): ({1}, {2})", GetType(), x, y);

        //getting object color from dll based on the pixel values provided by PositionAtObjectScreenSpace
        int detectedObjectCount = 1;

        unsafe
        {
            fixed(CoOrdinates_Color *outObj = _object)
            {
                OpenCVInterop.ObtainColor(x, y, outObj);        //call to dll
            }
        }
        //reading color from dll
        for (int i = 0; i < detectedObjectCount; i++)
        {
            Debug.LogWarningFormat("calib RGB: (" + _object[i].R + "," + _object[i].G + "," + _object[i].B + ")", GetType());
            //set color passed through dll
            byte r = (byte)(_object[i].R);
            byte g = (byte)(_object[i].G);
            byte b = (byte)(_object[i].B);
            rnd.material.color = new Color32(r, g, b, 255); // alpha 255 = fully opaque
        }
    }
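The fixed statement above requires CoOrdinates_Color to be a blittable struct whose layout matches the native side byte for byte. A plausible definition inferred from the field accesses (field order and integer types are assumptions):

    using System.Runtime.InteropServices;

    [StructLayout(LayoutKind.Sequential)]
    public struct CoOrdinates_Color
    {
        public int X, Y;    // pixel position queried in the frame
        public int R, G, B; // color sampled at that position
    }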
Example #6
 public void openThresholdMenu()
 {
     thresholdMenu.GetComponentInChildren <Slider>().value = OpenCVInterop.getThreshLevel();
     mainMenu.gameObject.SetActive(false);
     thresholdMenu.gameObject.SetActive(true);
     adjustThreshLevel = true;
 }
Example #7
    /*
     * void OnApplicationQuit()
     * {
     *  if (_ready)
     *  {
     *      Debug.LogWarningFormat("before closing application", GetType());
     *      OpenCVInterop.Close();
     *      Debug.LogWarningFormat("before closing application", GetType());
     *  }
     * }*/


    void Update()
    {
        if (!_ready)
        {
            return;
        }

        int detectedObjectCount = 1;

        unsafe
        {
            fixed(CoOrdinates_Color *outObj = _object)
            {
                OpenCVInterop.Detect(outObj);
            }
        }
        NormalizedObjPositions.Clear();
        objectColor.Clear();
        for (int i = 0; i < detectedObjectCount; i++)
        {
            float objX = (_object[i].X * DetectionDownScale) / CameraResolution.x;
            float objY = 1f - ((_object[i].Y * DetectionDownScale) / CameraResolution.y);

            NormalizedObjPositions.Add(new Vector2(objX, objY));
            objectColor.Add(new Vector3(_object[i].R, _object[i].G, _object[i].B));
        }
    }
Example #8
 void MatToTexture2D()
 {
     OpenCVInterop.showValidSingleFrame(pixelPtr);
     //Update the Texture2D with array updated in C++
     tex.SetPixels32(pixel32);
     tex.Apply();
 }
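For showValidSingleFrame to fill pixel32 through pixelPtr, the managed array must stay pinned while native code writes to it. A sketch of the one-time setup (pixelHandle is a hypothetical field; GCHandle lives in System.Runtime.InteropServices):

     // e.g. in Start(): pin the Color32 array so the GC cannot relocate it
     // while the C++ side writes pixels through the raw pointer.
     pixel32     = tex.GetPixels32();
     pixelHandle = GCHandle.Alloc(pixel32, GCHandleType.Pinned);
     pixelPtr    = pixelHandle.AddrOfPinnedObject();
     // Call pixelHandle.Free() in OnDestroy() to release the pin.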
Example #9
    private bool Initialize()
    {
        cameraState = CameraState.connecting;
        //Debug.Log("Initialize OpenCV Face Detection");
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }
            lastError   = result; // remember which step failed so Update() retries initialization
            cameraState = CameraState.disconnected;
            return(false);
        }
        CameraResolution        = new Vector2(camWidth, camHeight);
        _faces                  = new CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List <Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        lastError   = 0;
        cameraState = CameraState.connected;
        return(true);
    }
    void Start()
    {
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }

            return;
        }

        CameraResolution        = new Vector2(camWidth, camHeight);
        _faces                  = new CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List <Vector3>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
Example #11
    public CvFrame GetFaceDetectionResults()
    {
        CvFrame cvf;
        int     detectedFaceCount = 0;

        unsafe
        {
            fixed(CvCircle *outFaces = _faces)
            {
                cvf = OpenCVInterop.Detect(outFaces, _maxFaceDetectCount, ref detectedFaceCount);
            }
        }

        NormalizedFacePositions.Clear();
        for (int i = 0; i < detectedFaceCount; i++)
        {
            NormalizedFacePositions.Add(new Vector2((_faces[i].X * DetectionDownScale) / CameraResolution.x, 1f - ((_faces[i].Y * DetectionDownScale) / CameraResolution.y)));
        }

        for (int i = 0; i < NormalizedFacePositions.Count; i++)
        {
            float x = NormalizedFacePositions[i].x;
            float y = NormalizedFacePositions[i].y;
            Debug.LogFormat("For face # {0} x = {1} and y = {2}", i, x, y); // output the face positions
        }
        return(cvf);
    }
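Neither CvCircle nor this project's Detect binding is shown here; a sketch consistent with the usage above, inside the same hypothetical OpenCVInterop class (CvFrame is sketched after the next example; struct layout is an assumption):

    [StructLayout(LayoutKind.Sequential)]
    public struct CvCircle
    {
        public int X, Y, Radius; // face position and size in frame pixels
    }

    // Hypothetical binding: fills at most maxOutFacesCount circles, reports how
    // many were written, and returns the captured frame's metadata.
    [DllImport("OpenCVPlugin")]
    internal static extern unsafe CvFrame Detect(CvCircle *outFaces, int maxOutFacesCount, ref int outDetectedFacesCount);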
Example #12
    void Update()
    {
        // attempt to reinitialize if connection was lost last frame or is not present on start
        if (cameraState == CameraState.disconnected || lastError < 0)
        {
            bool initialized = Initialize();
            if (!initialized)
            {
                return;
            }
        }

        // this is the main call to the dll that does the face detection processing on an OpenCV frame
        // it returns the data needed to build output in Unity
        cvFrame = GetFaceDetectionResults();
        //Debug.LogFormat("from frame, x = {0}, y = {1}, bufferSize = {2}, memPtr = {3}, error = {4}", cvFrame.x, cvFrame.y, cvFrame.bufferSize, cvFrame.memPtr, cvFrame.error);

        // handle error cases returned by the dll, e.g. a disconnected USB device
        if (cvFrame.error < 0)
        {
            //Debug.LogFormat("RunFaceDetection returned error {0}", cvFrame.error);
            OpenCVInterop.Close();
            cameraState = CameraState.disconnected;
            lastError   = cvFrame.error; // so we know to reinitialize rather than run demo next frame
            return;
        }

        // create array, marshal data from pointer, output, then free the pointer
        byte[] bytes = new byte[cvFrame.bufferSize];
        Marshal.Copy(cvFrame.memPtr, bytes, 0, cvFrame.bufferSize);
        OutputFrame(cvFrame, bytes);
        OpenCVInterop.FreeMemory();
    }
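Update() reads five fields off CvFrame; a layout consistent with that usage (field order is an assumption, but every field below is exercised by the code above):

    using System;
    using System.Runtime.InteropServices;

    [StructLayout(LayoutKind.Sequential)]
    public struct CvFrame
    {
        public int    x, y;       // frame dimensions
        public int    bufferSize; // byte count behind memPtr
        public IntPtr memPtr;     // native pixel buffer, released via FreeMemory()
        public int    error;      // negative on capture failure, e.g. an unplugged camera
    }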
Example #13
    public void checkCurrentFrameSetId()
    {
        string currentStereoFramesSetId     = OpenCVInterop.getFramesSetId(2);
        string currentFirstCameraFramesSet  = OpenCVInterop.getFramesSetId(0);
        string currentSecondCameraFramesSet = OpenCVInterop.getFramesSetId(1);

        string frameSetsConcatenated = currentStereoFramesSetId + currentFirstCameraFramesSet + currentSecondCameraFramesSet;

        if (PlayerPrefs.GetString("calibrationId") != frameSetsConcatenated)
        {
            string message = checkForChangesInFolders(currentStereoFramesSetId, currentFirstCameraFramesSet, currentSecondCameraFramesSet);
            if (message.Length == 0)
            {
                estimatedTimeInSeconds = OpenCVInterop.getEstimatedCalibrationTime();
                gameObject.GetComponentInChildren <TextMeshProUGUI>().SetText("PRZEWIDYWANY CZAS KALIBRACJI: " + formatTimeInSeconds(estimatedTimeInSeconds)); // "ESTIMATED CALIBRATION TIME: ..."
                calibrateButton.gameObject.SetActive(true);
            }
            else
            {
                gameObject.GetComponentInChildren <TextMeshProUGUI>().SetText("WPROWADZONO ZMIANY W FOLDERACH ZE ZDJĘCIAMI" + message); // "CHANGES WERE MADE IN THE PHOTO FOLDERS"
            }
        }
        else
        {
            gameObject.GetComponentInChildren <TextMeshProUGUI>().SetText("KALIBRACJA ZOSTAŁA JUŻ PRZEPROWADZONA NA TYM ZESTAWIE ZDJĘĆ"); // "CALIBRATION HAS ALREADY BEEN PERFORMED ON THIS PHOTO SET"
        }
    }
Example #14
    public void initializeCameras()
    {
        errorMessagePanel.gameObject.SetActive(false);
        int camWidth = 640, camHeight = 480;

        Debug.Log("init SDL");
        int result = OpenCVInterop.InitSDLCameras(ref camWidth, ref camHeight);

        if (result < 0)
        {
            errorMessagePanel.gameObject.SetActive(true);
            if (result == -1)
            {
                Debug.LogWarningFormat("Failed to detect cameras", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("Failed to initialize cameras", GetType());
            }
        }
        else
        {
            SceneManager.LoadScene((int)Menu.Scenes.MainMenu);
        }
    }
Example #15
 void OnApplicationQuit()
 {
     if (cameraState == CameraState.connected)
     {
         OpenCVInterop.Close();
         Destroy(tex);
     }
 }
 void OnApplicationQuit()
 {
     if (_ready)
     {
         Debug.Log("sdfsdf");
         OpenCVInterop.Close();
     }
 }
Example #17
 public void deleteCurrentFrames()
 {
     invalidFrames++;
     validPairsCount = totalFrames - invalidFrames;
     OpenCVInterop.deleteCurrentSingleCameraFrame();
     currentPairNumber--;
     moveToNextFrame();
 }
Example #18
 void OnApplicationQuit()
 {
     if (_ready)
     {
         Debug.LogWarningFormat("before closing application", GetType());
         OpenCVInterop.Close();
         Debug.LogWarningFormat("before closing application", GetType());
     }
 }
    private int maxPerFrame = 0;                                // Maximum number of gestures to detect in the current frame

    public GestureCascade(GestureType type, String filePath, int maxPerFrame, Color color)
    {
        // Initialize the cascade from the file. The library will read the physical file with the classifier definition.
        cascade = OpenCVInterop.InitCascade(filePath);

        this.type        = type;
        this.maxPerFrame = maxPerFrame;
        this.color       = color;
    }
Example #20
 protected void readMarkerCoordinates(Tuple <Texture2D, Texture2D> textures, Tuple <IntPtr, IntPtr> pixelPtrs)
 {
     unsafe
     {
         fixed(CvCoordinates *outBalls = _balls)
         {
             OpenCVInterop.realTimeMonitoring(pixelPtrs.Item1, pixelPtrs.Item2, textures.Item1.width, textures.Item2.height, outBalls, ref performTracking, ref delay);
         }
     }
 }
Example #21
    protected void checkFPS()
    {
        float currentFPS = OpenCVInterop.getFPS();

        if (lastFPS != currentFPS)
        {
            Debug.Log("fps: " + currentFPS);
            lastFPS = currentFPS;
        }
    }
Example #22
    protected void initializeScene()
    {
        OpenCVInterop.setExpectedNumberOfMarkerPairs(expectedNumberOfMarkerPairs);
        display = new FrameDisplay();
        InitTexture();
        var textures = display.getTextures();

        firstFrame.texture  = textures.Item1;
        secondFrame.texture = textures.Item2;
        _balls = new CvCoordinates[expectedNumberOfMarkerPairs]; // create a buffer of the given size
    }
    void MatToTexture2D()
    {
        //Convert Mat to Texture2D
        int tw = tex.width;
        int th = tex.height;

        OpenCVInterop.GetRawImageBytes(ref pixelPtr, ref tw, ref th);
        //Update the Texture2D with array updated in C++
        tex.SetPixels32(pixel32);
        tex.Apply();
        //Debug.Log("Tex Updated");
    }
Example #24
 public void moveToNextFrame()
 {
     if (OpenCVInterop.moveToNextSingleCameraFrame())
     {
         currentPairNumber++;
         updateLabels();
         MatToTexture2D();
     }
     else
     {
         PlayerPrefs.SetString("SingleCameraValidationId" + cameraId.value, OpenCVInterop.getFramesSetId(cameraId.value));
         SceneManager.LoadScene((int)Menu.Scenes.FrameCheckMenu);
     }
 }
 public void moveToNextFrame()
 {
     if (OpenCVInterop.moveToNextStereoFrames())
     {
         currentPairNumber++;
         updateLabels();
         MatToTexture2D();
     }
     else
     {
         PlayerPrefs.SetString("StereoValidationId", OpenCVInterop.getFramesSetId(2));
         SceneManager.LoadScene((int)Menu.Scenes.CalibrationMenu);
     }
 }
    void ThreadedWork()
    {
        _threadRunning = true;
        bool workDone = false;

        CvCircle[] _faces;


        int result = OpenCVInterop.Init();

        if (result < 0)
        {
            if (result == -1)
            {
                Debug.LogWarningFormat("[{0}] Failed to find cascades definition.", GetType());
            }
            else if (result == -2)
            {
                Debug.LogWarningFormat("[{0}] Failed to open camera stream.", GetType());
            }

            return;
        }


        _faces = new CvCircle[2];
        //NormalizedFacePositions = new List<Vector3>();
        _ready = true;

        // This pattern lets us interrupt the work at a safe point if needed.
        while (_threadRunning && !workDone)
        {
            if (!_ready)
            {
                return;
            }

            unsafe
            {
                fixed(CvCircle *outFaces = _faces)
                {
                    OpenCVInterop.RunServer(outFaces);
                }
            }

            NormalizedFacePosition = new Vector3(((float)(640 - _faces[0].X) * DetectionDownScale) / 640f, 1f - (((float)_faces[0].Y * DetectionDownScale) / 480f), _faces[0].Radius);
        }
        _threadRunning = false;
    }
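ThreadedWork runs the detection loop off Unity's main thread and exits when _threadRunning goes false. A sketch of a driver for it (the _thread field and the OnDestroy hookup are assumptions, not from the source):

    using System.Threading;

    private Thread _thread;

    void Start()
    {
        _thread = new Thread(ThreadedWork);
        _thread.Start();
    }

    void OnDestroy()
    {
        // Ask the worker loop to stop at its next safe point, then wait for it.
        _threadRunning = false;
        _thread?.Join();
    }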
 public void clearCalibrationFramesFolder()
 {
     if (captureMode.value == 0)
     {
         OpenCVInterop.clearSingleCameraFramesFolder(0);
     }
     else if (captureMode.value == 1)
     {
         OpenCVInterop.clearSingleCameraFramesFolder(1);
     }
     else if (captureMode.value == 2)
     {
         OpenCVInterop.clearStereoCalibrationFramesFolder();
     }
 }
    // Start is called before the first frame update
    void Start()
    {
        int camWidth = 0, camHeight = 0;
        int result = OpenCVInterop.Init(ref camWidth, ref camHeight);

        if (result < 0)
        {
            Debug.LogWarningFormat("Failed to initialize DLL");
            return;
        }
        CameraResolution        = new Vector2(camWidth, camHeight);
        _faces                  = new CvCircle[_maxFaceDetectCount];
        NormalizedFacePositions = new List <Vector2>();
        OpenCVInterop.SetScale(DetectionDownScale);
        _ready = true;
    }
Example #29
    void updateRotationQueue()
    {
        int size = 0;

        unsafe
        {
            fixed(CvVector3D *outRotationVector = _rotationVec)
            {
                OpenCVInterop.getRotationVectorsAndSize(outRotationVector, ref size);
            }
        }
        _rotationVectorSize = size;
        if (size > 0)
        {
            insertInRotationQueue();
        }
    }
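As with the other interop buffers, _rotationVec must be an array of a blittable struct. A plausible CvVector3D, assuming OpenCV-style rotation vectors (layout and element type are assumptions):

    [StructLayout(LayoutKind.Sequential)]
    public struct CvVector3D
    {
        public double X, Y, Z; // rotation vector components
    }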
Example #30
    // To be used for comparing stored images.
    public static float compute_mssim_string(string imgpath1, string imgpath2)
    {
        // Adds the application data path which leads to the project's asset folder.
        string path1 = Application.dataPath + "/" + imgpath1;
        string path2 = Application.dataPath + "/" + imgpath2;

        // Does any of the images exist?
        if (File.Exists(path1) && File.Exists(path2))
        {
            return(OpenCVInterop.mssim_string(path1, path2));
        }
        else
        {
            Debug.Log("Could not find the following image paths:\n" + path1 + "\n" + path2);
            return(-1.0f);
        }
    }
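A quick usage sketch from within the same class; the paths are hypothetical and resolved relative to Application.dataPath, and the -1.0f sentinel marks a missing file:

    float similarity = compute_mssim_string("Screenshots/reference.png", "Screenshots/capture.png");
    if (similarity >= 0f)
    {
        Debug.Log("MSSIM similarity: " + similarity);
    }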