Example #1
    // Use this for initialization
    void Start()
    {
        WebCamDevice[] devices     = WebCamTexture.devices;
        int            cameraCount = devices.Length;

        if (cameraCount == 0)
        {
            Image <Bgr, Byte> img = new Image <Bgr, byte>(640, 240);
            CvInvoke.PutText(img, String.Format("{0} cameras found", cameraCount), new System.Drawing.Point(10, 60),
                             Emgu.CV.CvEnum.FontFace.HersheyDuplex,
                             1.0, new MCvScalar(0, 255, 0));
            Texture2D texture = TextureConvert.ImageToTexture2D(img, FlipType.Vertical);

            RenderTexture(texture);
            ResizeTexture(texture);
            //this.GetComponent<GUITexture>().texture = texture;
            //this.GetComponent<GUITexture>().pixelInset = new Rect(-img.Width/2, -img.Height/2, img.Width, img.Height);
        }
        else
        {
            // Stream from the first available camera
            webcamTexture = new WebCamTexture(devices[0].name);

            baseRotation = transform.rotation;
            webcamTexture.Play();
            //data = new Color32[webcamTexture.width * webcamTexture.height];
            CvInvoke.CheckLibraryLoaded();
        }
    }
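
This Start() method belongs to a larger MonoBehaviour: the webcamTexture and baseRotation fields, as well as the RenderTexture and ResizeTexture helpers it calls, are defined elsewhere in the example's class. A minimal sketch of the surrounding declarations this snippet appears to assume (field types inferred from how they are used here; the class name is made up):

using System;
using UnityEngine;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;

public class CameraCheck : MonoBehaviour   // hypothetical class name
{
    private WebCamTexture webcamTexture;   // live feed from devices[0], started in Start()
    private Quaternion    baseRotation;    // original transform rotation, cached for later frames

    // RenderTexture(Texture2D) and ResizeTexture(Texture2D) are helpers of the original
    // example class; their bodies are not part of this snippet.

    // ... Start() as shown above ...
}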
Example #2
    // Use this for initialization
    void Start()
    {
        Image <Bgr, Byte> img = new Image <Bgr, byte>(640, 240);

        String openclStr = "None";

        if (CvInvoke.HaveOpenCL)
        {
            //StringBuilder builder = new StringBuilder();
            using (VectorOfOclPlatformInfo oclPlatformInfos = OclInvoke.GetPlatformInfo())
            {
                if (oclPlatformInfos.Size > 0)
                {
                    OclPlatformInfo platformInfo = oclPlatformInfos[0];
                    openclStr = platformInfo.ToString();
                }
            }
        }

        CvInvoke.PutText(img, String.Format("Emgu CV for Unity {0}", Emgu.Util.Platform.OperationSystem), new System.Drawing.Point(10, 60), Emgu.CV.CvEnum.FontFace.HersheyDuplex,
                         1.0, new MCvScalar(0, 255, 0));

        CvInvoke.PutText(img, String.Format("OpenCL: {0}", openclStr), new System.Drawing.Point(10, 120), Emgu.CV.CvEnum.FontFace.HersheyDuplex,
                         1.0, new MCvScalar(0, 0, 255));

        Texture2D texture = TextureConvert.ImageToTexture2D(img, FlipType.Vertical);

        this.GetComponent <GUITexture>().texture    = texture;
        this.GetComponent <GUITexture>().pixelInset = new Rect(-img.Width / 2, -img.Height / 2, img.Width, img.Height);
    }
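
When OpenCL is reported as available, Emgu CV can also be asked to route UMat operations through it. An optional follow-up to the check above, a sketch using the same CvInvoke.HaveOpenCL property together with CvInvoke.UseOpenCL (assumed to be available, as in recent Emgu CV releases):

        // Optionally enable (or disable) the OpenCL code path for UMat-based operations.
        if (CvInvoke.HaveOpenCL)
        {
            CvInvoke.UseOpenCL = true;   // set to false to force the CPU path
        }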
Example #3
    // Use this for initialization
    void Start()
    {
        Texture2D lenaTexture = Resources.Load <Texture2D>("lena");

        //updateTextureWithString("load lena ok");
        Image <Bgr, Byte> img = TextureConvert.Texture2dToImage <Bgr, byte>(lenaTexture);
        //updateTextureWithString("convert to image ok");

        //String fileName = "haarcascade_frontalface_default";
        //String fileName = "lbpcascade_frontalface";
        String fileName = "haarcascade_frontalface_alt2";
        // Copy the cascade model out of Unity's Resources to a real file so CascadeClassifier can load it from disk
        String filePath = Path.Combine(Application.persistentDataPath, fileName + ".xml");
        //if (!File.Exists(filePath))
        {
            //updateTextureWithString("start move cascade xml");
            TextAsset cascadeModel = Resources.Load <TextAsset>(fileName);

#if UNITY_METRO
            UnityEngine.Windows.File.WriteAllBytes(filePath, cascadeModel.bytes);
#else
            File.WriteAllBytes(filePath, cascadeModel.bytes);
#endif
            //updateTextureWithString("File size: " + new FileInfo(filePath).Length);
        }


        // Detect faces on a grayscale copy of the image and outline each hit in green
        using (CascadeClassifier classifier = new CascadeClassifier(filePath))
            using (Image <Gray, Byte> gray = img.Convert <Gray, byte>())
            {
                //updateTextureWithString("classifier create ok");

                Rectangle[] faces = null;
                try
                {
                    faces = classifier.DetectMultiScale(gray);

                    //updateTextureWithString("face detected");
                    foreach (Rectangle face in faces)
                    {
                        CvInvoke.Rectangle(img, face, new MCvScalar(0, 255, 0));
                    }
                }
                catch (Exception e)
                {
                    //updateTextureWithString(e.Message);
                    return;
                }

                //updateTextureWithString(String.Format("{0} face found on image of {1} x {2}", faces.Length, img.Width, img.Height));
            }

        Texture2D texture = TextureConvert.ImageToTexture2D(img, FlipType.Vertical);

        this.GetComponent <GUITexture>().texture    = texture;
        this.GetComponent <GUITexture>().pixelInset = new Rect(-img.Width / 2, -img.Height / 2, img.Width, img.Height);
    }
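
The GUITexture component used to display the result here (and in Example #2) has been removed from recent Unity versions. A possible replacement, assuming the script is attached to a GameObject that carries a UnityEngine.UI.RawImage component, in the same spirit as the RawImage-based display in Example #4:

        // Show the result through a RawImage instead of the legacy GUITexture.
        UnityEngine.UI.RawImage rawImage = this.GetComponent<UnityEngine.UI.RawImage>();
        rawImage.texture = texture;
        rawImage.SetNativeSize();   // size the UI element to match the texture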
Example #4
    private void updateTextureWithString(String text)
    {
        Image <Bgr, Byte> img = new Image <Bgr, byte>(640, 240);

        CvInvoke.PutText(img, text, new System.Drawing.Point(10, 60), Emgu.CV.CvEnum.FontFace.HersheyDuplex,
                         1.0, new MCvScalar(0, 255, 0));

        Texture2D texture = TextureConvert.ImageToTexture2D(img);

        this.GetComponent <GUITexture>().texture = texture;
    }
    private void AnalyseFrame()
    {
        if (frame != null)
        {
            frame.Dispose();
        }
        frame = capture.QueryFrame();
        if (frame != null)
        {
            GameObject.Destroy(cameraTex);
            cameraTex = TextureConvert.ImageToTexture2D <Bgr, byte>(frame, true);
            Sprite.DestroyImmediate(CameraImageUI.GetComponent <UnityEngine.UI.Image>().sprite);
            CameraImageUI.sprite = Sprite.Create(cameraTex, new Rect(0, 0, cameraTex.width, cameraTex.height), new Vector2(0.5f, 0.5f));
        }
        if (true)
        //if (!processingFrame)
        {
            processingFrame = true;

            board = ImageTools.ReadFromFrame(frame.Clone(), filteringParameters);

            if (lookupImage != null)
            {
                lookupImage.Dispose();
            }

            if (board != null)
            {
                lookupImage = ImageTools.DrawRooms(320, 240, board.Grid);
            }
            else
            {
                lookupImage = new Image <Bgr, byte>(320, 240, new Bgr(0, 0, 0));
            }

            if (lookupImage != null)
            {
                GameObject.Destroy(lookupTex);
                lookupTex = TextureConvert.ImageToTexture2D <Bgr, byte>(lookupImage, true);
                Sprite.DestroyImmediate(LookupUI.GetComponent <UnityEngine.UI.Image>().sprite);
                LookupUI.sprite = Sprite.Create(lookupTex, new Rect(0, 0, lookupTex.width, lookupTex.height), new Vector2(0.5f, 0.5f));
            }
            processingFrame = false;
        }
    }
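
AnalyseFrame() leans on fields and helpers of its own class; ImageTools, filteringParameters and board are project-specific and not part of Emgu CV. A rough sketch of the declarations the method appears to assume (types inferred from usage, so treat them as guesses):

    // Assumed fields of the class that owns AnalyseFrame() -- inferred, not part of the original listing.
    private VideoCapture capture;                          // camera source (the class is named Capture in older Emgu releases)
    private Image<Bgr, byte> frame;                        // latest frame from capture.QueryFrame()
                                                           // (QueryFrame returns a Mat in newer Emgu versions)
    private Texture2D cameraTex, lookupTex;                // textures shown in the UI
    private UnityEngine.UI.Image CameraImageUI, LookupUI;  // UI images that receive the sprites
    private Image<Bgr, byte> lookupImage;                  // rendered grid/room overlay
    private bool processingFrame;                          // guards against re-entrant processing
    // board, filteringParameters and ImageTools come from the example's own project and are not shown.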
    // Start is called before the first frame update
    void Start()
    {
        capture = new VideoCapture(0);
        Image <Bgr, Byte> initialFrame = capture.QueryFrame().ToImage <Bgr, Byte>();

        tracker  = new TrackerCSRT();
        RawImage = GetComponent <RawImage>();

        // Drawing the initial handBox
        float midX = (initialFrame.Width / 2);
        float midY = (initialFrame.Height / 2);

        handBox = new Rectangle((int)midX - 100, (int)midY - 100, 200, 200);

        initialFrame.Draw(handBox, new Bgr(System.Drawing.Color.Green), 3);
        texture          = TextureConvert.ImageToTexture2D <Bgr, Byte>(initialFrame, FlipType.Vertical);
        RawImage.texture = texture;
    }
    // Update is called once per frame
    void Update()
    {
        Image <Bgr, Byte> frame = capture.QueryFrame().ToImage <Bgr, Byte>();

        // Initialize tracker
        if (Input.GetKeyDown("space") && !isReady)
        {
            print("space key was pressed");
            tracker.Init(frame.Mat, handBox);
            isReady = true;

            // Move Raw Image to bottom right hand corner
            RawImage.rectTransform.anchorMin = new Vector2(1, 0);
            RawImage.rectTransform.anchorMax = new Vector2(1, 0);
            RawImage.rectTransform.pivot     = new Vector2(1, 0);
            RawImage.rectTransform.sizeDelta = new Vector2(319, 179);
        }

        // User has already initialized the tracker, so do tracking on this frame
        if (isReady)
        {
            // Rectangle is a value type, so comparing it to null is always true;
            // use the bool returned by Tracker.Update to know whether the target was found.
            Rectangle box;
            bool found = tracker.Update(frame.Mat, out box);
            if (found)
            {
                frame.Draw(box, new Bgr(System.Drawing.Color.Green), 3);

                // Invert y axis
                Vector3 pos = VideoCoordToScreenCoord(box.X, box.Y, frame.Width, frame.Height);
                pos.y = pos.y * -1;
                hand.transform.position = pos;
            }
            else
            {
                Debug.Log("Tracker lost the target");
            }
        }
        else
        {
            frame.Draw(handBox, new Bgr(System.Drawing.Color.Green), 3);
        }
        RawImage.texture = TextureConvert.ImageToTexture2D <Bgr, byte>(frame, FlipType.Vertical);
    }
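
The Start()/Update() pair at the end is a small CSRT hand-tracking demo; its fields and the VideoCoordToScreenCoord helper live elsewhere in that class. A sketch of the declarations the two methods appear to assume (everything inferred from usage; the helper's body is project-specific and not shown):

    // Assumed fields of the CSRT tracking example -- inferred from the snippet above.
    private VideoCapture capture;        // opened as new VideoCapture(0) in Start()
    private TrackerCSRT  tracker;        // Emgu.CV.Tracking.TrackerCSRT
    private RawImage     RawImage;       // UnityEngine.UI.RawImage that displays the frames
    private Texture2D    texture;        // texture built from the initial frame
    private Rectangle    handBox;        // System.Drawing.Rectangle, the initial 200x200 box
    private bool         isReady;        // true once the tracker has been initialized with handBox
    private GameObject   hand;           // scene object moved to the tracked position

    // VideoCoordToScreenCoord(int x, int y, int frameWidth, int frameHeight) is a project-specific
    // helper that maps video-pixel coordinates to a Unity position; it is not part of this snippet.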