private void Device_OnImage(object sender, GTHardware.Cameras.ImageEventArgs e)
        {
            // Only update the preview image when not capturing
            if (isRunning == false)
                imageBoxCapturedFrame.Image = e.Image;

            // Store data when displaying points
            if (isRunning && calWindow != null && calWindow.calibrationControl != null)
            {
                // Get location of calibration point
                double x = 0;
                double y = 0;

                // The canvas is owned by the UI thread; read the current point's position
                // there via a synchronous dispatcher call, so x and y are populated before
                // they are used below (see the helper sketch after this handler)
                calWindow.calibrationControl.Dispatcher.Invoke
                (
                    DispatcherPriority.Render,
                    new Action(
                        delegate
                        {
                            x = Math.Round(Canvas.GetLeft(calWindow.calibrationControl.CurrentPoint), 0);
                            y = Math.Round(Canvas.GetTop(calWindow.calibrationControl.CurrentPoint), 0);
                        })
                );

                // Store image and point position
                sequenceData.AddImage(e.Image.Copy(), new Point(x, y));
            }
        }
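
A note on the dispatcher call in the handler above: the camera raises its image event on a capture thread, while Canvas attached properties may only be read on the UI thread. Below is a minimal sketch of a reusable helper for that pattern; the CanvasHelpers and GetCanvasPosition names are hypothetical, not part of the original source.

using System;
using System.Windows;
using System.Windows.Controls;

static class CanvasHelpers
{
    // Hypothetical helper: reads an element's Canvas position from any thread
    // by marshaling the property access onto the element's dispatcher.
    public static Point GetCanvasPosition(UIElement element)
    {
        // Invoke (unlike BeginInvoke) blocks until the UI thread has run the
        // delegate, so the returned values are valid immediately.
        return (Point)element.Dispatcher.Invoke(
            new Func<Point>(() => new Point(Canvas.GetLeft(element), Canvas.GetTop(element))));
    }
}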
Example #2
        /// <summary>
        /// The main event handler, called whenever the camera capture class
        /// has a new eye video image available.
        /// Starts processing the frame, trying to find the pupil and glints.
        /// </summary>
        /// <param name="sender">The camera device that raised the event.</param>
        /// <param name="e">The <see cref="GTHardware.Cameras.ImageEventArgs"/> carrying the new
        /// <see cref="Emgu.CV.Image{Emgu.CV.Structure.Gray, byte}"/> frame.</param>
        private void Device_OnImage(object sender, GTHardware.Cameras.ImageEventArgs e)
        {
            imgCounter++;
            processingDone = false;

            Performance.Now.IsEnabled = false; // Timing output suppressed; enable via IsEnabled = true, end via Stop()
            Performance.Now.Start();

            // TrackData object stores all information on pupil centers, glints etc.
            trackData = new TrackData();
            trackData.TimeStamp = DateTime.UtcNow.Ticks / TimeSpan.TicksPerMillisecond;
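            // (The timestamp above is milliseconds since 0001-01-01 UTC, not a
            // Unix-epoch value; convert before comparing against Unix time.)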
            trackData.FrameNumber = imgCounter;

            // Keep reference to image in local variable
            Image<Gray, byte> gray = e.Image;

            // Flip image here, directshow flipping is not supported by every device
            if (Settings.Instance.Camera.FlipImage)
                gray = gray.Flip(FLIP.VERTICAL);

            // Tracking disabled: if drawing is enabled, just set the gray image in the visualization and return
            if (Settings.Instance.Visualization.VideoMode == VideoModeEnum.RawNoTracking)
            {
                Performance.Now.Stop();

                if (Settings.Instance.Visualization.IsDrawing)
                {
                    visualization.Gray = gray;
                    visualization.TrackData = trackData; // not sure if there is anything to visualize here..
                    CalculateFPS();
                    RaiseFrameProcessingCompletedEvent(true);
                }
                return;
            }

            try
            {
                // Process image, find features, main entry point to processing chain
                trackData.ProcessingOk = detectionManager.ProcessImage(gray, trackData);

                if (trackData.ProcessingOk)
                {
                    if (calibration.CalibMethod.IsCalibrated)
                    {
                        CalculateGazeCoordinates(trackData);

                        if (Settings.Instance.FileSettings.LoggingEnabled)
                            logGaze.LogData(trackData);
                    }
                    else
                    {
                        if (isCalibrating)
                            SaveCalibInfo(trackData);

                        // Really log uncalibrated data? For pupil size?
                        //if (Settings.Instance.FileSettings.LoggingEnabled)
                        //    logGaze.LogData(trackData);
                    }
                }
                else
                {
                    if (Settings.Instance.FileSettings.LoggingEnabled)
                        logGaze.LogData(trackData);
                }
            }
            catch (Exception)
            {
                trackData.ProcessingOk = false;
            }
            // ********************************************************************* MODIFIED ***********************************************************************
            // Sends values via the UDP server directly

            if (server.SendSmoothedData)
                server.SendGazeData(gazeDataSmoothed.GazePositionX, gazeDataSmoothed.GazePositionY,
                    trackData.PupilDataLeft.Diameter);
            else
                // Send avg. value
                server.SendGazeData(gazeDataRaw.GazePositionX, gazeDataRaw.GazePositionY, trackData.PupilDataLeft.Diameter);
            //server.SendTrackData(trackData);

            Autotune.Instance.Tune();

            // Set data for visualization
            if (Settings.Instance.Visualization.IsDrawing && isCalibrating == false)
            {
                // Drawn on-demand by calling GetProcessed or GetOriginalImage
                visualization.Gray = gray.Copy();
                visualization.TrackData = trackData;
            }

            // Recenter camera ROI
            detectionManager.CameraCenterROI(trackData, gray.Size);

            // Store camera roi position
            trackData.CameraROI = GTHardware.Camera.Instance.ROI;

            // Add sample to database
            TrackDB.Instance.AddSample(trackData.Copy());

            // Calculate the frames per second we're tracking at
            CalculateFPS();

            // Stop performance timer
            Performance.Now.Stop();

            // Raise FrameCaptureComplete event (UI listens for updating video stream)
            RaiseFrameProcessingCompletedEvent(trackData.ProcessingOk);
        }
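
For context, a minimal sketch of how such a handler is typically wired up. The Device property and the OnImage event name are assumptions inferred from the Device_OnImage naming convention; only GTHardware.Camera.Instance appears in the source above, so adjust to the actual GTHardware API if it differs.

// Assumed wiring: event and property names inferred from the handler name,
// not confirmed by the source above.
var device = GTHardware.Camera.Instance.Device;
device.OnImage += Device_OnImage;   // frames start arriving on the capture thread

// ... run the tracking session ...

device.OnImage -= Device_OnImage;   // unsubscribe when shutting down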