Code Example #1
 public OpenCVFaceDetector(OpenCVFaceDetectorConfig config)
 {
     _inited = false;
     _config = config;
     _img    = new GstImageInfo();
     _img.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_Alpha8);
 }
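
A recurring pattern in the examples below is allocating the GstImageInfo buffer at a placeholder 1x1 size and letting the plugin refresh its real dimensions once frame data is copied in. The sketch below only restates that lifecycle using the calls visible in these examples (Create, CloneTo, UpdateInfo, and the Destory spelling from Code Example #21); the class and method names are invented for illustration.

// Illustrative only: buffer lifecycle as seen across these examples.
public class ImageBufferExample
{
    GstImageInfo _buffer;

    public void Allocate()
    {
        _buffer = new GstImageInfo();
        // 1x1 placeholder; the plugin resizes the buffer when data is copied in.
        _buffer.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_R8G8B8);
    }

    public void Receive(GstImageInfo incoming)
    {
        // Copy the incoming frame and refresh the cached width/height.
        incoming.CloneTo(_buffer);
        _buffer.UpdateInfo();
    }

    public void Release()
    {
        // Note: the plugin's cleanup method is spelled "Destory" (see Code Example #21).
        _buffer.Destory();
        _buffer = null;
    }
}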
Code Example #2
    // Use this for initialization
    void Start()
    {
        _capDev       = new UnityOpenCVVideoCaptureAPI();
        _faceDetector = new UnityOpenCVFaceDetectorAPI(Application.dataPath + DetectorParameters.cascadesPath, DetectorParameters.resizeFactor, DetectorParameters.scaler, DetectorParameters.minNeighbors,
                                                       DetectorParameters.minSize, DetectorParameters.maxSize);
        _faceDetector.BindCamera(_capDev);
        _image     = new GstImageInfo();
        _faceImage = new GstImageInfo();


        BlitImage = new Texture2D(1, 1);
        FaceImage = new Texture2D(1, 1);
        _image.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_R8G8B8);
        _faceImage.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_R8G8B8);


        _imageGrabber = new Thread(new ThreadStart(ImageGrabberThread));

        SettingsHolder.Instance.AddDependencyNode(this);


        if (Debugger != null)
        {
            Debugger.AddDebugElement(this);
        }
    }
Code Example #3
 void OnImageSampled(ImageSampler s, GstImageInfo ifo)
 {
     if (_detect)
     {
         _faceDetector.BindImage(ifo);
         _processing = true;
     }
     _detect = false;
 }
Code Example #4
    void OnEnable()
    {
        CameraPorts cameraports  = CameraPanel.Load();
        List <int>  portSettings = new List <int>();

        portSettings.Add(cameraports.camera1);
        portSettings.Add(cameraports.camera2);
        portSettings.Add(cameraports.camera3);
        portSettings.Add(cameraports.camera4);
        portSettings.Add(cameraports.idMiniROV);
        //Debug.Log("El id está en la posicion: "+portSettings.Count);
        GameObject parent = GameObject.Find("CameraManager");

        for (int i = 0; i < parent.transform.childCount; i++)
        {
            if (this.gameObject.name == ("VideoStream" + (i + 1)))
            {
                port = portSettings[i].ToString();
                int mjpegCamera = portSettings[portSettings.Count - 1];
                if (i == mjpegCamera)
                {
                    pipeline = "udpsrc port=" + port + " ! application/x-rtp,encoding-name=JPEG,payload=26 ! rtpjpegdepay ! jpegdec ! appsink name=videoSink";
                }
                else
                {
                    pipeline = "udpsrc port=" + port + " ! application/x-rtp ! rtph264depay ! avdec_h264 ! videoconvert ! appsink name=videoSink";
                }
            }
        }

        m_Texture = gameObject.GetComponent <GstCustomTexture>();
        m_Texture.Initialize();
        //StartCoroutine(Dalay());
        System.Threading.Thread.Sleep(100);
        //pipeline = "udpsrc port="+port+" ! application/x-rtp,encoding-name=H264,payload=96 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! appsink name=videoSink";
        m_Texture.SetPipeline(pipeline);          //        pipeline+" ! video/x-raw,format=I420 ! videoconvert ! appsink name=videoSink"
        //StartCoroutine(Dalay());
        System.Threading.Thread.Sleep(100);
        m_Texture.Player.CreateStream();
        //StartCoroutine(Dalay());
        System.Threading.Thread.Sleep(100);
        m_Texture.Player.Play();

        m_Texture.OnFrameBlitted += OnFrameBlitted;
        _img = new GstImageInfo();
        _img.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_R8G8B8);

        BlittedImage            = new Texture2D(1, 1);
        BlittedImage.filterMode = FilterMode.Bilinear;
        BlittedImage.anisoLevel = 0;
        BlittedImage.wrapMode   = TextureWrapMode.Clamp;

        if (TargetMaterial != null)
        {
            TargetMaterial.mainTexture = BlittedImage;
        }
    }
Code Example #5
 public bool ToImage(GstImageInfo image, int x, int y, int w, int h)
 {
     if (CVVideoCap_ToImage(_instance, image.GetInstance(), x, y, w, h))
     {
         image.UpdateInfo();
         return(true);
     }
     return(false);
 }
Code Example #6
File: GstCustomPlayer.cs Project: red-pencil/ISWC18
    public bool CopyFrame(GstImageInfo image)
    {
        bool ret = mray_gst_customPlayerCopyFrame(m_Instance, image.GetInstance());

        if (ret)
        {
            image.UpdateInfo();
        }
        return(ret);
    }
Code Example #7
File: GstCustomPlayer.cs Project: red-pencil/ISWC18
    public bool CopyFrameCropped(GstImageInfo image, int x, int y, int w, int h)
    {
        bool ret = mray_gst_customPlayerCropFrame(m_Instance, image.GetInstance(), x, y, w, h);

        if (ret)
        {
            image.UpdateInfo();
        }
        return(ret);
    }
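
CopyFrame and CopyFrameCropped (Code Examples #6 and #7) refresh the image metadata whenever the native copy succeeds. As a rough sketch of how a script might poll for frames, assuming the GstCustomTexture members shown elsewhere in these examples (Initialize, SetPipeline, Player.CreateStream, Player.Play, Player.CopyFrame); the component name and the videotestsrc pipeline string are placeholders.

using UnityEngine;

// Illustrative only: polls the player for the latest frame each Update().
// GstCustomTexture, GstCustomPlayer and GstImageInfo come from the plugin used
// throughout these examples.
public class FramePollingExample : MonoBehaviour
{
    GstCustomTexture _texture;
    GstImageInfo     _frame;

    void Start()
    {
        _texture = GetComponent<GstCustomTexture>();
        _texture.Initialize();
        _texture.SetPipeline("videotestsrc ! videoconvert ! appsink name=videoSink");
        _texture.Player.CreateStream();
        _texture.Player.Play();

        _frame = new GstImageInfo();
        _frame.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_R8G8B8);
    }

    void Update()
    {
        // The wrapper refreshes the image info when the native copy succeeds
        // (see Code Example #6), so Width/Height track the stream size.
        if (_texture.Player.CopyFrame(_frame))
        {
            Debug.Log("Frame size: " + _frame.Width + "x" + _frame.Height);
        }
    }
}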
Code Example #8
    void _initTexture()
    {
        _SrcTexture.OnFrameGrabbed += OnFrameGrabbed;
        _img = new GstImageInfo();
        _img.Create(1, 1, _format);

        _blittedImg            = new Texture2D(1, 1);
        _blittedImg.filterMode = FilterMode.Bilinear;
        _blittedImg.anisoLevel = 0;
        _blittedImg.wrapMode   = TextureWrapMode.Clamp;
    }
Code Example #9
 public override void BindImage(GstImageInfo img)
 {
     if (_detecting)
     {
         return;
     }
     img.CloneTo(_img);
     _img.UpdateInfo();
     //_LatLngPos.Set (zoi.Latitude, zoi.Longtitude);
     //_blitRect = zoi.BlitRect;
     _newImage = true;
     _signalEvent.Set();
 }
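
Code Example #9 clones each incoming image into a private buffer, raises _newImage, and signals _signalEvent, while Code Example #2 creates a dedicated grabber thread; together they imply a clone-and-signal handoff that keeps detection off the render thread. The worker side is not shown in these examples, so the loop below is only a guess at its shape: the field names mirror Code Example #9, while _running, the AutoResetEvent type, and the loop body are assumptions.

using System.Threading;

// Hypothetical worker-side counterpart to Code Example #9.
class DetectorWorkerSketch
{
    readonly GstImageInfo _img = new GstImageInfo();
    readonly AutoResetEvent _signalEvent = new AutoResetEvent(false);
    volatile bool _newImage;
    volatile bool _detecting;
    volatile bool _running = true;   // assumed shutdown flag

    void DetectionLoop()
    {
        while (_running)
        {
            _signalEvent.WaitOne();       // set by BindImage() when a frame arrives
            if (!_newImage)
            {
                continue;
            }

            _detecting = true;            // BindImage() skips frames while this is set
            int w = _img.Width;           // placeholder: real detection would read _img here
            _detecting = false;
            _newImage  = false;
        }
    }
}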
Code Example #10
 public override void CalculateWeights(GstImageInfo image, ImageFeatureMap target)
 {
     if (!Enabled)
     {
         return;
     }
     DetectionTime = 0;
     for (int i = 0; i < Detectors.Count; ++i)
     {
         Detectors [i].CalculateWeights(image, target);
         DetectionTime += Detectors [i].DetectionTime;
     }
 }
Code Example #11
File: GStreamerTest1.cs Project: mrayy/TxKit
    // Use this for initialization
    void Start()
    {
        m_Texture = gameObject.GetComponent <GstCustomTexture>();
        // NOTE: this early return is present in the original source and leaves the
        // initialization below unreachable; remove it to enable frame grabbing.
        return;

        m_Texture.OnFrameGrabbed += OnFrameGrabbed;
        _img = new GstImageInfo();
        _img.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_R8G8B8);

        BlittedImage            = new Texture2D(1, 1);
        BlittedImage.filterMode = FilterMode.Bilinear;
        BlittedImage.anisoLevel = 0;
        BlittedImage.wrapMode   = TextureWrapMode.Clamp;

        TargetRenderObject.material.mainTexture = BlittedImage;
    }
Code Example #12
    // Use this for initialization
    void Start()
    {
        if (SourceTexture == null)
        {
            SourceTexture = GameObject.FindObjectOfType <GstCustomTexture> ();
        }

        _imgSampler = new ImageSampler(SourceTexture);
        _imgSampler.OnImageSampled += OnImageSampled;

        _detector = new UnityOpenCVObjectDetectorAPI();
        _sampling = true;

        _tmp = new GstImageInfo();
        _tmp.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_Alpha8);

        TrackingManager.Instance.Ref();
    }
Code Example #13
    public override void CalculateWeights(GstImageInfo image, ImageFeatureMap target)
    {
        DetectionTime = 0;
        if (!Enabled)
        {
            return;
        }
        List <Rect> normObjects = new List <Rect> ();

        _detector.BindImage(image);

        if (!_featuresDetected)
        {
            return;
        }

        _featuresDetected = false;


        int W = image.Width;
        int H = image.Height;

        //fill features to the features map
        foreach (Rect o in _faces)
        {
            //expand the detected area few pixels
            Rect r = new Rect();
            r.x      = o.x + (int)(Params.FaceConfig.ExpansionRect.x * o.width);
            r.y      = o.y + (int)(Params.FaceConfig.ExpansionRect.y * o.height);
            r.width  = o.width + (int)((Params.FaceConfig.ExpansionRect.width) * o.width);
            r.height = o.height + (int)((Params.FaceConfig.ExpansionRect.height) * o.height);
            //fill detected face rectangle with full weight
            target.FillRectangle((float)r.x / (float)W, (float)r.y / (float)H,
                                 (float)r.width / (float)W, (float)r.height / (float)H, 1);
            normObjects.Add(new Rect(r.left / (float)W, r.top / (float)H, r.width / (float)W, r.height / (float)H));
        }


        _detectedObjects = normObjects;
    }
Code Example #14
    // Use this for initialization

    private void OnEnable()
    {
        GameObject parent = GameObject.Find("CameraManager");

        for (int i = 0; i < parent.transform.childCount; i++)
        {
            if (this.gameObject.name == ("VideoStream" + (i + 1)))
            {
                port = this.GetComponentInParent <CameraManager>().GetPorts()[i];
            }
        }

        m_Texture = gameObject.GetComponent <GstCustomTexture>();
        m_Texture.Initialize();
        //StartCoroutine(Dalay());
        System.Threading.Thread.Sleep(100);
        //pipeline = "udpsrc port="+port+" ! application/x-rtp,encoding-name=H264,payload=96 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! appsink name=videoSink";
        pipeline = "udpsrc port=" + port + " ! application/x-rtp ! rtph264depay ! avdec_h264 ! videoconvert ! appsink name=videoSink";
        m_Texture.SetPipeline(pipeline);          //        pipeline+" ! video/x-raw,format=I420 ! videoconvert ! appsink name=videoSink"
        //StartCoroutine(Dalay());
        System.Threading.Thread.Sleep(100);
        m_Texture.Player.CreateStream();
        //StartCoroutine(Dalay());
        System.Threading.Thread.Sleep(100);
        m_Texture.Player.Play();

        m_Texture.OnFrameBlitted += OnFrameBlitted;
        _img = new GstImageInfo();
        _img.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_R8G8B8);

        BlittedImage            = new Texture2D(1, 1);
        BlittedImage.filterMode = FilterMode.Bilinear;
        BlittedImage.anisoLevel = 0;
        BlittedImage.wrapMode   = TextureWrapMode.Clamp;

        if (TargetMaterial != null)
        {
            TargetMaterial.mainTexture = BlittedImage;
        }
    }
Code Example #15
    // Use this for initialization
    void Start()
    {
        m_Texture = gameObject.GetComponent <GstCustomTexture>();
        m_Texture.Initialize();
        m_Texture.SetPipeline(pipeline + " ! video/x-raw,format=I420 ! videoconvert ! appsink name=videoSink");
        m_Texture.Player.CreateStream();
        m_Texture.Player.Play();

        m_Texture.OnFrameBlitted += OnFrameBlitted;
        _img = new GstImageInfo();
        _img.Create(3840, 2160, GstImageInfo.EPixelFormat.EPixel_R8G8B8);

        BlittedImage            = new Texture2D(7680, 2160);
        BlittedImage.filterMode = FilterMode.Bilinear;
        BlittedImage.anisoLevel = 0;
        BlittedImage.wrapMode   = TextureWrapMode.Clamp;

        if (TargetMaterial != null)
        {
            TargetMaterial.mainTexture = BlittedImage;
        }
    }
Code Example #16
    // Use this for initialization
    void Start()
    {
        m_Texture = gameObject.GetComponent <GstCustomTexture>();
        m_Texture.Initialize();

        m_Texture.SetPipeline(pipeline);
        m_Texture.Player.CreateStream();
        m_Texture.Player.Play();

        m_Texture.OnFrameBlitted += OnFrameBlitted;
        _img = new GstImageInfo();
        _img.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_R8G8B8);

        BlittedImage            = new Texture2D(1, 1);
        BlittedImage.filterMode = FilterMode.Bilinear;
        BlittedImage.anisoLevel = 0;
        BlittedImage.wrapMode   = TextureWrapMode.Clamp;

        if (TargetMaterial != null)
        {
            TargetMaterial.mainTexture = BlittedImage;
        }
    }
Code Example #17
    // Use this for initialization
    void Start()
    {
        m_Texture = gameObject.GetComponent <GstCustomTexture>();
        m_Texture.Initialize();
        m_Texture.SetPipeline(pipeline + " ! video/x-raw,format=I420 ! videoconvert ! appsink name=videoSink");
        m_Texture.Player.CreateStream();
        m_Texture.Player.Play();

        m_Texture.OnFrameBlitted += OnFrameBlitted;
        _img_left = new GstImageInfo();
        _img_left.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_R8G8B8);

        _img_right = new GstImageInfo();
        _img_right.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_R8G8B8);

        blittedImageLeft            = new Texture2D(1, 1);
        blittedImageLeft.filterMode = FilterMode.Bilinear;
        blittedImageLeft.anisoLevel = 0;
        blittedImageLeft.wrapMode   = TextureWrapMode.Clamp;

        blittedImageRight            = new Texture2D(1, 1);
        blittedImageRight.filterMode = FilterMode.Bilinear;
        blittedImageRight.anisoLevel = 0;
        blittedImageRight.wrapMode   = TextureWrapMode.Clamp;

        if (leftUIImage != null)
        {
            leftUIImage.material.mainTexture    = blittedImageLeft;
            leftUIImage.rectTransform.sizeDelta = new Vector2(frameWidth, frameHeight);
        }

        if (rightUIImage != null)
        {
            rightUIImage.material.mainTexture    = blittedImageRight;
            rightUIImage.rectTransform.sizeDelta = new Vector2(frameWidth, frameHeight);
        }
    }
Code Example #18
    // Use this for initialization
    void Start()
    {
        m_Texture = gameObject.GetComponent <GstCustomTexture>();
        m_Texture.Initialize();
        string pipeline = "udpsrc port=" + Port.ToString() + " ! application/x-rtp, encoding-name=H264, payload=96 ! rtph264depay ! h264parse ! avdec_h264";

        m_Texture.SetPipeline(pipeline + " ! video/x-raw,format=I420 ! videoconvert ! appsink name=videoSink");
        m_Texture.Player.CreateStream();
        m_Texture.Player.Play();

        m_Texture.OnFrameBlitted += OnFrameBlitted;
        _img_left = new GstImageInfo();
        _img_left.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_R8G8B8);

        _img_right = new GstImageInfo();
        _img_right.Create(1, 1, GstImageInfo.EPixelFormat.EPixel_R8G8B8);

        blittedImageLeft            = new Texture2D(1, 1);
        blittedImageLeft.filterMode = FilterMode.Bilinear;
        blittedImageLeft.anisoLevel = 0;
        blittedImageLeft.wrapMode   = TextureWrapMode.Clamp;

        blittedImageRight            = new Texture2D(1, 1);
        blittedImageRight.filterMode = FilterMode.Bilinear;
        blittedImageRight.anisoLevel = 0;
        blittedImageRight.wrapMode   = TextureWrapMode.Clamp;

        if (leftUIImage != null)
        {
            leftUIImage.texture = blittedImageLeft;
        }

        if (rightUIImage != null)
        {
            rightUIImage.texture = blittedImageRight;
        }
    }
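
The streaming examples build their GStreamer pipelines by string concatenation: an RTP/H.264 receive chain in Code Examples #4, #14 and #18 (the last adds explicit RTP caps and h264parse), and an RTP/JPEG chain for the mini-ROV camera in Code Example #4. The helper below merely reassembles those same strings; the class, method, and parameter names are invented for illustration.

// Illustrative only: reproduces the receive pipelines used in these examples.
static class PipelineStrings
{
    public static string BuildUdpPipeline(int port, bool mjpeg)
    {
        if (mjpeg)
        {
            // RTP/JPEG variant (mini-ROV camera in Code Example #4)
            return "udpsrc port=" + port +
                   " ! application/x-rtp,encoding-name=JPEG,payload=26" +
                   " ! rtpjpegdepay ! jpegdec ! appsink name=videoSink";
        }

        // RTP/H.264 variant (Code Examples #4 and #14)
        return "udpsrc port=" + port +
               " ! application/x-rtp ! rtph264depay ! avdec_h264 ! videoconvert" +
               " ! appsink name=videoSink";
    }
}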
Code Example #19
File: IFaceDetector.cs Project: red-pencil/ISWC18
 public abstract void BindImage(GstImageInfo image);
Code Example #20
 void OnImageSampled(ImageSampler sampler, GstImageInfo img)
 {
     _processing = true;
     img.CloneTo(_tmp);
     TrackingManager.Instance.AddTicket(this);
 }
Code Example #21
 public void Destroy()
 {
     _img.Destory();
     _img = null;
 }
Code Example #22
 public abstract void CalculateWeights(GstImageInfo image, ImageFeatureMap target);
Code Example #23
 void OnImageArrived(ImageSampler sampler, GstImageInfo img)
 {
     _newImageArrived = true;
     _textureUpdated  = false;
 }
Code Example #24
File: UnityOpenCVAPI.cs Project: red-pencil/ISWC18
 public void BindImage(GstImageInfo img)
 {
     DN_BindImage(_instance, img.GetInstance());
 }
Code Example #25
File: UnityOpenCVAPI.cs Project: red-pencil/ISWC18
 public bool TrackInImage(GstImageInfo img, ref float x, ref float y)
 {
     return(ObjectTracker_TrackInImage(_instance, img.GetInstance(), ref x, ref y));
 }
Code Example #26
File: UnityOpenCVAPI.cs Project: red-pencil/ISWC18
 public void BindImage(GstImageInfo img)
 {
     ObjectTracker_BindImage(_instance, img.GetInstance());
 }
Code Example #27
File: GstImageInfo.cs Project: mrayy/The-Lightroom
 public void CloneTo(GstImageInfo ifo)
 {
     mray_cloneImageData(GetInstance(), ifo.GetInstance());
 }
Code Example #28
File: UnityOpenCVAPI.cs Project: red-pencil/ISWC18
 public void BindImage(GstImageInfo img)
 {
     FaceDetector_BindImage(_instance, img.GetInstance());
 }