Example 1
        /// <summary>
        /// The main event handler that is called whenever the camera capture class
        /// has a new eye video image available.
        /// Starts processing the frame, trying to find the pupil and glints.
        /// </summary>
        /// <param name="sender">The camera capture object raising the event.</param>
        /// <param name="e">The <see cref="GTHardware.Cameras.ImageEventArgs"/>
        /// carrying the new <see cref="Emgu.CV.Image{Emgu.CV.Structure.Gray, byte}"/> frame.</param>
        private void Device_OnImage(object sender, GTHardware.Cameras.ImageEventArgs e)
        {
            imgCounter++;
            processingDone = false;

            Performance.Now.IsEnabled = false;
            Performance.Now.Start(); // Stop output by setting IsEnabled = false or Stop()

            // TrackData object stores all information on pupil centers, glints etc.
            trackData             = new TrackData();
            trackData.TimeStamp   = DateTime.UtcNow.Ticks / TimeSpan.TicksPerMillisecond;
            trackData.FrameNumber = imgCounter;

            // Keep reference to image in local variable
            Image<Gray, byte> gray = e.Image;

            // Flip image here, directshow flipping is not supported by every device
            if (Settings.Instance.Camera.FlipImage)
            {
                gray = gray.Flip(FLIP.VERTICAL);
            }

            // Tracking disabled; if drawing is enabled, just set the gray image in the visualization and return
            if (Settings.Instance.Visualization.VideoMode == VideoModeEnum.RawNoTracking)
            {
                Performance.Now.Stop();

                if (Settings.Instance.Visualization.IsDrawing)
                {
                    visualization.Gray      = gray;
                    visualization.TrackData = trackData; // not sure if there is anything to visualize here..
                    CalculateFPS();
                    RaiseFrameProcessingCompletedEvent(true);
                }
                return;
            }

            try
            {
                // Process image, find features, main entry point to processing chain
                trackData.ProcessingOk = detectionManager.ProcessImage(gray, trackData);

                if (trackData.ProcessingOk)
                {
                    if (calibration.CalibMethod.IsCalibrated)
                    {
                        CalculateGazeCoordinates(trackData);

                        if (Settings.Instance.FileSettings.LoggingEnabled)
                        {
                            logGaze.LogData(trackData);
                        }
                    }
                    else
                    {
                        if (isCalibrating)
                        {
                            SaveCalibInfo(trackData);
                        }

                        // Really log uncalibrated data? For pupil size?
                        //if (Settings.Instance.FileSettings.LoggingEnabled)
                        //    logGaze.LogData(trackData);
                    }
                }
                else
                {
                    if (Settings.Instance.FileSettings.LoggingEnabled)
                    {
                        logGaze.LogData(trackData);
                    }
                }
            }
            catch (Exception)
            {
                trackData.ProcessingOk = false;
            }
            // ******************************************** MODIFIED ********************************************
            // Sends values via the UDP server directly

            if (server.SendSmoothedData)
            {
                server.SendGazeData(gazeDataSmoothed.GazePositionX, gazeDataSmoothed.GazePositionY,
                    trackData.PupilDataLeft.Diameter);
            }
            else
            {
                // Send avg. value
                server.SendGazeData(gazeDataRaw.GazePositionX, gazeDataRaw.GazePositionY,
                    trackData.PupilDataLeft.Diameter);
            }
            //server.SendTrackData(trackData);

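            // Autotune presumably adapts detection parameters (e.g. thresholds)
            // from recent tracking results; its implementation is not part of this excerpt.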
            Autotune.Instance.Tune();

            // Set data for visualization
            if (Settings.Instance.Visualization.IsDrawing && isCalibrating == false)
            {
                // Drawn on-demand by calling GetProcessed or GetOriginalImage
                visualization.Gray      = gray.Copy();
                visualization.TrackData = trackData;
            }

            // Recenter camera ROI
            detectionManager.CameraCenterROI(trackData, gray.Size);

            // Store camera roi position
            trackData.CameraROI = GTHardware.Camera.Instance.ROI;

            // Add sample to database
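            // (a copy is stored so later mutation of the live trackData object
            //  does not alter the recorded sample)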
            TrackDB.Instance.AddSample(trackData.Copy());

            // Calculate the frames per second we're tracking at
            CalculateFPS();

            // Stop performance timer
            Performance.Now.Stop();

            // Raise FrameCaptureComplete event (UI listens for updating video stream)
            RaiseFrameProcessingCompletedEvent(trackData.ProcessingOk);
        }
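
Both handlers call CalculateFPS() without showing its body. The following is a minimal sketch of what such a counter could look like, assuming a one-second sliding window of frame timestamps; the class and member names (FpsCounter, TrackingFPS) are illustrative assumptions, not part of the original source.

using System;
using System.Collections.Generic;

// Sketch only: the windowing strategy and all names here are assumptions.
public class FpsCounter
{
    private readonly Queue<long> timestamps = new Queue<long>();

    public int TrackingFPS { get; private set; }

    public void CalculateFPS()
    {
        long nowMs = DateTime.UtcNow.Ticks / TimeSpan.TicksPerMillisecond;
        timestamps.Enqueue(nowMs);

        // Discard timestamps older than one second; the remaining
        // count approximates the current frames-per-second.
        while (nowMs - timestamps.Peek() > 1000)
            timestamps.Dequeue();

        TrackingFPS = timestamps.Count;
    }
}

Counting frames inside a sliding window gives a smoother reading than inverting the last inter-frame interval, which is why a small queue is assumed here.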
Example 2
        /// <summary>
        /// The main event handler that is called whenever the camera capture class
        /// has a new eye video image available.
        /// Starts processing the frame, trying to find the pupil and glints.
        /// </summary>
        /// <param name="newVideoFrame">The <see cref="Emgu.CV.Image{Emgu.CV.Structure.Gray, byte}"/>
        /// image with the new frame.</param>
        private void Camera_FrameCaptureComplete(Image<Gray, byte> newVideoFrame)
        {
            imgCounter++;
            processingDone = false;
            bool processingOk;

            Performance.Now.IsEnabled = false;
            Performance.Now.Start(); // Stop output by setting IsEnabled = false or Stop()

            // TrackData object stores all information on pupil centers, glints etc.
            trackData             = new TrackData();
            trackData.TimeStamp   = DateTime.UtcNow.Ticks / TimeSpan.TicksPerMillisecond;
            trackData.FrameNumber = imgCounter;

            // Flip image here, directshow flipping is not supported by every device
            if (GTSettings.Current.Camera.FlipImage)
            {
                newVideoFrame = newVideoFrame.Flip(FLIP.VERTICAL);
            }

            // Set the original gray frame for visualization
            if (isCalibrating == false)
            {
                visualization.Gray = newVideoFrame;
            }

            // Calculate the frames per second we're tracking at
            CalculateFPS();

            if (GTSettings.Current.Visualization.VideoMode == VideoModeEnum.RawNoTracking)
            {
                Performance.Now.Stop();
                RaiseFrameProcessingCompletedEvent(true);
                return;
            }

            try
            {
                // Process image, find features, main entry point to processing chain
                processingOk = detectionManager.ProcessImage(newVideoFrame, trackData);

                if (processingOk)
                {
                    if (calibration.calibMethod.IsCalibrated)
                    {
                        CalculateGazeCoordinates(trackData);

                        if (GTSettings.Current.FileSettings.LoggingEnabled)
                        {
                            logGaze.LogData(trackData);
                        }
                    }
                    else
                    {
                        if (isCalibrating)
                        {
                            SaveCalibInfo(trackData);
                        }

                        if (GTSettings.Current.FileSettings.LoggingEnabled)
                        {
                            logGaze.LogData(trackData);
                        }
                    }
                }
                else
                {
                    if (GTSettings.Current.FileSettings.LoggingEnabled)
                    {
                        logGaze.LogData(trackData);
                    }
                }
            }
            catch (Exception)
            {
                processingOk = false;
            }

            // Add sample to database
            TrackDB.Instance.AddSample(trackData.Copy());

            Autotune.Instance.Tune();

            // Update visualization when features have been detected
            if (isCalibrating == false)
            {
                visualization.Visualize(trackData);
            }

            // Stop performance timer and calculate FPS
            Performance.Now.Stop();

            // Raise FrameCaptureComplete event (UI listens for updating video stream)
            RaiseFrameProcessingCompletedEvent(processingOk);
        }
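
Both versions finish by calling RaiseFrameProcessingCompletedEvent(bool), whose implementation is not shown. Below is a plausible sketch following the standard pre-C# 6 .NET event-raising pattern; the event name and FrameProcessingEventArgs type are assumptions inferred from the call sites above, not the original GazeTracker API.

using System;

// Sketch only: names are assumptions based on RaiseFrameProcessingCompletedEvent(bool).
public class FrameProcessingEventArgs : EventArgs
{
    public FrameProcessingEventArgs(bool processingOk)
    {
        ProcessingOk = processingOk;
    }

    public bool ProcessingOk { get; private set; }
}

public class TrackerBase
{
    public event EventHandler<FrameProcessingEventArgs> FrameProcessingCompleted;

    protected void RaiseFrameProcessingCompletedEvent(bool processingOk)
    {
        // Copy the delegate to a local so a concurrent unsubscribe cannot
        // null the field between the check and the invocation.
        EventHandler<FrameProcessingEventArgs> handler = FrameProcessingCompleted;
        if (handler != null)
        {
            handler(this, new FrameProcessingEventArgs(processingOk));
        }
    }
}

As the "// Raise FrameCaptureComplete event" comments suggest, the UI would subscribe to this event and pull the latest frame from the visualization object when it fires.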