/// <summary>
/// Returns the shared PointDistributor, lazily creating it on first use.
/// NOTE(review): this lazy initialization is not thread-safe; if multiple
/// threads can call GetInstance concurrently, guard the null check with a
/// lock (or use Lazy&lt;T&gt;) — confirm the threading requirements.
/// </summary>
/// <returns>The singleton PointDistributor instance.</returns>
public static PointDistributor GetInstance()
{
    if (instance == null)
    {
        instance = new PointDistributor();
    }

    // Plain return statement; "return(instance);" read like a method call.
    return instance;
}
 public static PointDistributor GetInstance()
 {
     if (instance == null)
     {
         instance = new PointDistributor();
     }
     return instance;
 }
// ---- Example #3 ----
        /// <summary>
        /// Creates a learner that continuously samples gesture points from the
        /// shared PointDistributor and immediately starts recording.
        /// </summary>
        /// <param name="samplingCooldown">
        /// Minimum time between recorded samples, in milliseconds.
        /// </param>
        public ContinuousGestureLearner(long samplingCooldown)
        {
            this.samplingCooldown = samplingCooldown;
            stopwatch             = new System.Diagnostics.Stopwatch();
            startRecording();

            // Subscribe via method group; the explicit "new GivePoint(...)"
            // delegate wrapper is redundant in C#.
            // NOTE(review): this handler is never unsubscribed, so the learner
            // stays reachable from the PointDistributor singleton — confirm
            // that lifetime is intended.
            PointDistributor.GetInstance().OnPointReceived += GivePoint;

            // Give the current object back a reference to us
            CurrentObjectBag.SCurrentGestureLearner = this;
        }
// ---- Example #4 ----
        /// <summary>
        /// Creates a learner that detects a discrete "hold" gesture: a point
        /// held roughly still (within motionEpsilon) for holdTime milliseconds.
        /// </summary>
        /// <param name="holdTime">How long the point must be held, in milliseconds.</param>
        /// <param name="motionEpsilon">
        /// Allowed movement while holding; units are whatever the incoming
        /// points use — NOTE(review): confirm against the point source.
        /// </param>
        /// <param name="minNumberOfPoints">Minimum buffered points before a hold can register.</param>
        public DiscreteGestureLearner(long holdTime, double motionEpsilon, int minNumberOfPoints = 5)
        {
            stopwatch              = new System.Diagnostics.Stopwatch();
            outOfPlaneTimer        = new System.Diagnostics.Stopwatch();
            pointBuffer            = new Queue <TimePointTuple>();
            this.minNumberOfPoints = minNumberOfPoints;
            this.holdTime          = holdTime;
            this.motionEpsilon     = motionEpsilon;

            // Subscribe via method group; the explicit "new GivePoint(...)"
            // delegate wrapper is redundant in C#.
            PointDistributor.GetInstance().OnPointReceived += GivePoint;

            // Give the current object back a reference to us
            CurrentObjectBag.SCurrentGestureLearner = this;
        }
        /// <summary>
        /// Creates a validator that checks incoming points against an ordered
        /// set of target points, within a distance tolerance, and starts the path.
        /// </summary>
        /// <param name="targetPoints">Ordered points the gesture must pass through.</param>
        /// <param name="epsilonBoundary">Allowed distance from each target point.</param>
        public GestureValidator(Queue <Point2d> targetPoints, double epsilonBoundary)
        {
            this.targetPoints         = targetPoints;
            this.epsilon              = epsilonBoundary;
            manipulatableTargetPoints = new Queue <Point2d>();
            completedTargets          = new HashSet <Point2d>();

            // Subscribe via method group; the explicit delegate wrapper is redundant.
            PointDistributor.GetInstance().OnPointReceived += GivePoint;

            timer     = new System.Diagnostics.Stopwatch();
            lastReset = 0;
            beginPath();

            // Give the current object back a reference to us.
            // (The original assigned SCurrentGestureValidator twice; once suffices.)
            CurrentObjectBag.SCurrentGestureValidator = this;
        }
        /// <summary>
        /// Execute startup tasks: build the drawing surfaces, create the
        /// gesture learner, locate a connected Kinect sensor, enable its
        /// color/skeleton/depth streams, and start it.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            // Create the drawing group we'll use for drawing
            this.drawingGroup = new DrawingGroup();

            // Create an image source that we can use in our image control
            this.imageSource = new DrawingImage(this.drawingGroup);

            // Singleton that fans incoming points out to the learners/validators.
            pDistributor = ThreeDAuth.PointDistributor.GetInstance();

            // create a new gesture learner
            // NOTE(review): 2000 is presumably the hold time in ms and 20 the
            // motion epsilon (see DiscreteGestureLearner) — confirm the units.
            gLearner = new ThreeDAuth.DiscreteGestureLearner(2000, 20);

            Image.Source = this.imageSource;

            //start by Siavash
            // Second drawing group/image pair used for the live hand feedback view.

            this.liveFeedbackGroup = new DrawingGroup();

            this.myFrame = new ThreeDAuth.ReferenceFrame();

            this.handSource = new DrawingImage(this.liveFeedbackGroup);

            myImageBox.Source = this.handSource;

            //End by siavash

            // Look through all sensors and start the first connected one.
            // This requires that a Kinect is connected at the time of app startup.
            // To make your app robust against plug/unplug,
            // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    this.sensor = potentialSensor;
                    break;
                }
            }

            if (null != this.sensor)
            {
                // Enable the streams before Start(); all frames are consumed
                // through the single OnAllFramesReady handler below.
                this.sensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);

                // Turn on the skeleton stream to receive skeleton frames
                this.sensor.SkeletonStream.Enable();
                // Turn on the depth image stream to receive skeleton frames
                this.sensor.DepthStream.Enable();

                //this.sensor.DepthFrameReady += this.SensorDepthFrameReady;

                //End Siavash

                // Add an event handler to be called whenever there is new color frame data
                //this.sensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;
                this.sensor.AllFramesReady += this.OnAllFramesReady;

                //faceTrackingViewer.setSensor(this.sensor);

                // Start the sensor!
                try
                {
                    this.sensor.Start();
                }
                catch (IOException)
                {
                    // Another process may own the sensor; treat as "no sensor".
                    this.sensor = null;
                }
            }

            if (null == this.sensor)
            {
                // No usable sensor found (or Start() failed above).
                System.Console.WriteLine("The Kinect sensor is not ready");
                //this.statusBarText.Text = Properties.Resources.NoKinectReady;
            }
        }
 /// <summary>
 /// Static convenience wrapper: forwards a point to the shared
 /// PointDistributor's GivePoint method.
 /// </summary>
 /// <param name="p">Point to hand to the distributor.</param>
 public static void SGivePoint(Point p)
 {
     // Resolve the singleton once, then forward the point.
     var distributor = PointDistributor.GetInstance();
     distributor.GivePoint(p);
 }