    /// <summary>
    /// Unity Start function.
    /// </summary>
    private void Start()
    {
        //Get the current ARCore session
        m_Session = ARSessionManager.Instance.GetSession();
        //Scale the session transform by m_scaleFactor
        m_Session.transform.localScale *= m_scaleFactor;

        //Initialize lists
        m_OldImages       = new List <AugmentedImage>();
        m_CurrFrameImages = new List <AugmentedImage>();
        m_Visualizers     = new List <ImageVisualizer>();

        //Set the device screen to never timeout
        //Screen.sleepTimeout = SleepTimeout.NeverSleep;

        //Initialize object pool of ImageVisualizers
        for (int i = 0; i < m_PoolStartingSize; i++)
        {
            //Create new instance of ImageVisualizer
            ImageVisualizer visualizer = Instantiate(m_ImageVisualizerPrefab);
            //Deactivate the visualizer's GameObject
            visualizer.gameObject.SetActive(false);

            //Add instance to object pool
            ObjectPool.Add(visualizer);
        }

        // Set Overlay Screen active
        m_FitToScanOverlay.SetActive(true);
    }
Example #2
    // Initialize the sensor
    public void InitilizeSensor(int uniqueID, string sensorURL, int sensorPort, List <string> sensorSubscribers)
    {
        Debug.Log("Init Camera Connection at " + sensorURL + ":" + sensorPort.ToString());

        ros       = new ROSBridgeWebSocketConnection(sensorURL, sensorPort);
        client_id = uniqueID.ToString();
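        // For each requested sensor topic, create an ImageVisualizer and register a ROS subscriber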
        foreach (string subscriber in sensorSubscribers)
        {
            string subscriberTopic = "";
            switch (subscriber)
            {
            default:
                // All subscriber names are currently mapped into the DJI SDK namespace
                subscriberTopic = "/dji_sdk/" + subscriber;
                ImageVisualizer imageVisualizer = new ImageVisualizer();
                //imageVisualizer.SetParentTransform(this.transform);
                imageVisualizers.Add(subscriberTopic, imageVisualizer);
                ros.AddSubscriber(subscriberTopic, this);
                break;
            }
            Debug.Log("Camera subscribing to: " + subscriberTopic);
            sensorSubscriberTopics.Add(subscriberTopic);
            keyList.Add(subscriberTopic, true);
        }

        ros.Connect();
    }
    /// <summary>
    /// Creates new visualizers for the AugmentedImages in the list.
    /// </summary>
    /// <param name="_imagesToVisualize"></param>
    private void CreateVisualizerObjects(List <AugmentedImage> _imagesToVisualize)
    {
        foreach (AugmentedImage image in _imagesToVisualize)
        {
            //Create an anchor at the centre of the image
            Anchor anchor = image.CreateAnchor(image.CenterPose);

            //Only proceed if the anchor was created successfully
            if (anchor != null)
            {
                Debug.LogWarning("Pose Pos: " + image.CenterPose.position);
                Debug.LogWarning("Anchor Pos: " + anchor.transform.position);

                //Scale the anchor position to match the scaled session
                anchor.transform.position *= m_scaleFactor;
                Debug.LogWarning("Scaled Anchor Pos: " + anchor.transform.position);

                //Get a visualizer from the object pool
                ImageVisualizer visualizer = ObjectPool.GetExistingOrNew <ImageVisualizer>();

                //Set the image
                visualizer.Image = image;
                //Set the anchor
                visualizer.Anchor = anchor;

                //Enable the visualizer
                visualizer.gameObject.SetActive(true);
                //Add visualizer to list
                m_Visualizers.Add(visualizer);

                //Only add image to old image list if anchor has been made for it
                m_OldImages.Add(image);
            }
        }
    }
Example #4
        static void Main(string[] args)
        {
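            // Create a 3-channel 8-bit Mat (640 rows x 480 cols), fill it with a solid color, and show it in the test visualizer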
            Mat m = new Mat(640, 480, DepthType.Cv8U, 3);

            m.SetTo(new MCvScalar(255, 0, 0));
            ImageVisualizer.TestShowVisualizer(m);
        }
Example #5
    // Update is called once per frame
    void Update()
    {
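        // Do nothing until the ARCore session is actively tracking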
        if (Session.Status != SessionStatus.Tracking)
        {
            return;
        }

        Session.GetTrackables(_images, TrackableQueryFilter.Updated);

        foreach (var image in _images)
        {
            ImageVisualizer visualizer = null;
            m_Visualizers.TryGetValue(image.DatabaseIndex, out visualizer);
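            // A newly tracked image with no visualizer yet gets an anchor and a visualizer at its center pose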
            if (image.TrackingState == TrackingState.Tracking && visualizer == null)
            {
                Anchor anchor = image.CreateAnchor(image.CenterPose);
                visualizer                  = (ImageVisualizer)Instantiate(ImageVisualizerPrefab, anchor.transform.position, anchor.transform.rotation);
                visualizer.Image            = image;
                visualizer.transform.parent = anchor.transform;
                visualizer.transform.Rotate(90, 0, 0);
                m_Visualizers.Add(image.DatabaseIndex, visualizer);
            }
            else if (image.TrackingState == TrackingState.Stopped && visualizer != null)
            {
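                // Tracking has stopped for this image: remove and destroy its visualizer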
                m_Visualizers.Remove(image.DatabaseIndex);
                Destroy(visualizer.gameObject);
            }
        }
    }
Example #6
    // ROS Topic Subscriber methods
    public ROSBridgeMsg OnReceiveMessage(string topic, JSONNode raw_msg, ROSBridgeMsg parsed = null)
    {
        Debug.Log("Camera Recieved message");

        ROSBridgeMsg result = null;

        ImageMsg meshMsg = new ImageMsg(raw_msg);
        // Obtain visualizer for this topic
        ImageVisualizer visualizer = imageVisualizers[topic];

        this.imageVisualizers[topic].SetFrame(meshMsg, videoType);
        return(result);
    }
Example #7
        public uEyeCamera(int ID, string parPath, bool stack, bool disp, int X, int targetFR, PulsePal <MovementData> pulsePal)
        {
            this.stack           = stack;
            this.targetFrameRate = targetFR;
            this.display         = disp;
            this.pulsePal        = pulsePal;
            this.ID = ID.ToString();
            //check all the available IDS cameras
            m_Camera = new uEye.Camera();
            uEye.Types.CameraInformation[] cameraList;
            uEye.Info.Camera.GetCameraList(out cameraList);
            firstFrame = new Frame();

            //if there are no cameras available, mark the camera as not live
            if (cameraList.Length == 0)
            {
                m_IsLive = false;
            }
            else
            {
                //initialize the camera with the appropriate ID
                uEye.Defines.Status statusRet;
                statusRet = CameraInit(Convert.ToInt32(cameraList[ID].DeviceID));

                //if initialization is successful
                if (statusRet == uEye.Defines.Status.SUCCESS)
                {
                    //try to capture a frame
                    statusRet = m_Camera.Acquisition.Capture();
                    if (statusRet != uEye.Defines.Status.SUCCESS)
                    {
                        m_IsLive = false;
                        MessageBox.Show("Starting Live Video Failed");
                    }
                    else
                    {
                        m_IsLive = true;

                        //load camera parameters from external file
                        LoadParametersFromFile(parPath);
                        m_Camera.Video.ResetCount();
                        //if frames are to be saved, initialize the frame queue
                        //if (stack)
                        //queue = new ConcurrentStack<Frame>();

                        //if frames are to be displayed, initialize the display window
                        if (display)
                        {
                            provider = new ServiceProvider();
                            vis      = new TypeVisualizerDialog();
                            provider.services.Add(vis);
                            imVis = new ImageVisualizer();
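                            //size the display to the camera's current area of interest (AOI)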
                            System.Drawing.Rectangle rect;
                            m_Camera.Size.AOI.Get(out rect);
                            imVis.Load(provider, rect.Width, rect.Height);
                            vis.Show();
                            vis.Location = new System.Drawing.Point(X, 0);
                        }
                    }
                    //run the OnFrameEvent handler whenever the camera raises an EventFrame
                    m_Camera.EventFrame += OnFrameEvent;
                }
                //if initialization was unsuccessful but the camera is open, close it
                if (statusRet != uEye.Defines.Status.SUCCESS && m_Camera.IsOpened)
                {
                    m_Camera.Exit();
                }
            }
        }
Example #8
        public uEyeCamera(int ID, string parPath, bool stack, bool disp, int X)
        {
            this.stack   = stack;
            this.display = disp;

            //check all the available IDS cameras
            m_Camera = new uEye.Camera();
            uEye.Types.CameraInformation[] cameraList;
            uEye.Info.Camera.GetCameraList(out cameraList);

            //if there are no cameras available, mark the camera as not live
            if (cameraList.Length == 0)
            {
                m_IsLive = false;
            }
            else
            {
                //initialize the camera with the appropriate ID
                uEye.Defines.Status statusRet;
                statusRet = CameraInit(Convert.ToInt32(cameraList[ID].CameraID));

                //if initialization is successful
                if (statusRet == uEye.Defines.Status.SUCCESS)
                {
                    //try to capture a frame
                    statusRet = m_Camera.Acquisition.Capture();
                    if (statusRet != uEye.Defines.Status.SUCCESS)
                    {
                        m_IsLive = false;
                        MessageBox.Show("Starting Live Video Failed");
                    }
                    else
                    {
                        m_IsLive = true;

                        //load camera parameters from external file
                        LoadParametersFromFile(parPath);
                        m_Camera.Video.ResetCount();

                        //if frames are to be saved, initialize the frame queue
                        if (stack)
                        {
                            queue = new ConcurrentStack <Frame>();
                        }

                        //if frames are to be displayed, initialize the display window
                        if (display)
                        {
                            provider = new ServiceProvider();
                            vis      = new TypeVisualizerDialog();
                            provider.services.Add(vis);
                            imVis = new ImageVisualizer();
                            imVis.Load(provider);
                            vis.Show();
                            vis.Location = new System.Drawing.Point(X, 0);
                        }
                    }
                }
                //if initialization was unsuccessful but the camera is open, close it
                if (statusRet != uEye.Defines.Status.SUCCESS && m_Camera.IsOpened)
                {
                    m_Camera.Exit();
                }
            }
        }