コード例 #1
0
ファイル: TouchSegmentation.cs プロジェクト: tcboy88/touchcam
        /// <summary>
        /// Feeds one sensor reading into the touch-segmentation state machine
        /// and reports whether a touch is currently in progress.
        /// </summary>
        /// <param name="reading">The latest raw sensor sample.</param>
        /// <returns>True while a touch is down, false otherwise.</returns>
        public static bool UpdateWithReading(Sensors.Reading reading)
        {
            // Keep the frame-rate estimate current; the moving-average window
            // is sized in seconds, so it needs the sample rate.
            FPS.Sensors.Update();
            frameRate = FPS.Sensors.Average;

            UpdateMovingAverages(reading);

            if (touchDown && movingAverage1 > thresh && movingAverage2 > thresh)
            {
                // Both IR channels rose back above the threshold: finger lifted.
                OnTouchUp();
                touchDown = false;
            }
            else if (!touchDown && (movingAverage1 < thresh || movingAverage2 < thresh))
            {
                // Either IR channel dropped below the threshold: finger landed.
                OnTouchDown();
                touchDown = true;
            }

            return touchDown;
        }
コード例 #2
0
        /// <summary>
        /// Renders a 640x640 plot of the corrected sensor readings (both IR
        /// reflectance channels plus the three accelerometer axes) over the
        /// gesture's duration, and stores it in <c>visualization</c>.
        /// </summary>
        public void UpdateVisualization()
        {
            if (DefaultGesture)
            {
                NoVisualization(); return;
            }

            // Guard: with no readings there is nothing to plot, and indexing
            // correctedSensorReadings[0] below would throw.
            if (correctedSensorReadings.Count == 0)
            {
                NoVisualization(); return;
            }

            // Total gesture duration in seconds (timestamps appear to be in
            // milliseconds — the /1000 below assumes so).
            float duration = correctedSensorReadings[correctedSensorReadings.Count - 1].Timestamp - correctedSensorReadings[0].Timestamp;

            duration /= 1000.0f;

            Bitmap img = new Bitmap(640, 640);

            // Graphics and Pen wrap GDI+ handles; dispose them so the handles
            // don't leak (the Bitmap itself is kept as the visualization).
            using (Graphics g = Graphics.FromImage(img))
            using (Pen irPen = new Pen(Brushes.Black, 9))
            using (Pen xPen = new Pen(Brushes.Red, 5))
            using (Pen yPen = new Pen(Brushes.Green, 5))
            using (Pen zPen = new Pen(Brushes.Blue, 5))
            {
                //g.DrawLine(Pens.Black, 0, 320, 640, 320);
                Sensors.Reading prevReading = null;
                float           prevX       = 0;

                foreach (Sensors.Reading reading in correctedSensorReadings)
                {
                    // Horizontal position: elapsed time normalized to the gesture duration.
                    float x = img.Width * (float)(reading.Timestamp - correctedSensorReadings[0].Timestamp) / (duration * 1000);
                    if (prevReading != null)
                    {
                        // draw IR1 (reflectance plotted top-down: 1.0 at the top)
                        float y     = img.Height * (1 - reading.InfraredReflectance1);
                        float prevY = img.Height * (1 - prevReading.InfraredReflectance1);
                        g.DrawLine(irPen, prevX, prevY, x, y);

                        // draw IR2
                        y     = img.Height * (1 - reading.InfraredReflectance2);
                        prevY = img.Height * (1 - prevReading.InfraredReflectance2);
                        g.DrawLine(irPen, prevX, prevY, x, y);

                        // draw Accelerometer1.X (centered vertically, scaled by +/-20)
                        y     = img.Height / 2 + img.Height / 2 * (-reading.Accelerometer1.X / 20);
                        prevY = img.Height / 2 + img.Height / 2 * (-prevReading.Accelerometer1.X / 20);
                        g.DrawLine(xPen, prevX, prevY, x, y);

                        // draw Accelerometer1.Y
                        y     = img.Height / 2 + img.Height / 2 * (-reading.Accelerometer1.Y / 20);
                        prevY = img.Height / 2 + img.Height / 2 * (-prevReading.Accelerometer1.Y / 20);
                        g.DrawLine(yPen, prevX, prevY, x, y);

                        // draw Accelerometer1.Z
                        y     = img.Height / 2 + img.Height / 2 * (-reading.Accelerometer1.Z / 20);
                        prevY = img.Height / 2 + img.Height / 2 * (-prevReading.Accelerometer1.Z / 20);
                        g.DrawLine(zPen, prevX, prevY, x, y);
                    }
                    prevReading = reading;
                    prevX       = x;
                }
            }

            visualization = img;
        }
コード例 #3
0
        /// <summary>
        /// Queues a sensor reading for logging; readings arriving while the
        /// logger is not running are silently dropped.
        /// </summary>
        /// <param name="reading">The sensor sample to log.</param>
        public static void LogSensorReading(Sensors.Reading reading)
        {
            if (Running)
            {
                eventQueue.Add(new SensorReadingEvent(reading));
            }
        }
コード例 #4
0
        /// <summary>
        /// Produces gravity-corrected, orientation-normalized readings for a
        /// gesture and computes its feature vector from them.
        /// </summary>
        /// <param name="gesture">The gesture whose raw readings are preprocessed.</param>
        /// <param name="useSecondSensor">Forwarded to SubtractGravity; the
        /// second-sensor normalization path is currently disabled.</param>
        public static void PreprocessGesture(Gesture gesture, bool useSecondSensor = false)
        {
            // Reference orientation: the orientation at the gesture's first sample.
            Quaternion baseOrientation = gesture.SensorReadings[0].Orientation1;

            foreach (Sensors.Reading raw in gesture.SensorReadings)
            {
                Sensors.Reading corrected = OrientationTracker.SubtractGravity(raw, useSecondSensor);
                corrected.Magnetometer1 = OrientationTracker.SubtractOrientation(corrected.Magnetometer1, baseOrientation);
                gesture.CorrectedSensorReadings.Add(corrected);
            }

            gesture.Features = getFeatures(gesture.CorrectedSensorReadings);
        }
コード例 #5
0
        /// <summary>
        /// Returns a copy of the reading with gravity removed from the first
        /// accelerometer: the acceleration is rotated into the world frame,
        /// gravity is subtracted from the vertical axis, and the result is
        /// rotated back into the sensor frame.
        /// </summary>
        /// <param name="sensorReading">The reading to correct (not modified).</param>
        /// <param name="useSecondSensor">Currently ignored — the second-sensor
        /// path is disabled.</param>
        /// <returns>A gravity-corrected clone of the input reading.</returns>
        public static Sensors.Reading SubtractGravity(Sensors.Reading sensorReading, bool useSecondSensor = false)
        {
            Sensors.Reading corrected = sensorReading.Clone();

            Quaternion orientation = sensorReading.Orientation1;
            Point3D    worldAccel  = orientation.RotateVector(sensorReading.Accelerometer1);

            worldAccel.Z -= (float)GRAVITY;
            corrected.Accelerometer1 = orientation.InverseRotateVector(worldAccel);

            return corrected;
        }
コード例 #6
0
ファイル: TouchSegmentation.cs プロジェクト: tcboy88/touchcam
        static float windowSize = 1.0f / 8.0f; // moving-average window length, in seconds

        /// <summary>
        /// Slides both IR-reflectance windows forward by one sample and updates
        /// their moving averages incrementally: the running sum is rebuilt from
        /// the stored average, stale samples are evicted until the window fits
        /// frameRate * windowSize samples, the new sample is appended, and the
        /// sum is renormalized by the new count.
        /// </summary>
        static void UpdateMovingAverages(Sensors.Reading reading)
        {
            // --- IR channel 1 ---
            movingAverage1 *= irReadings1.Count;
            while (irReadings1.Count > 0 && irReadings1.Count >= frameRate * windowSize)
            {
                movingAverage1 -= irReadings1.Dequeue();
            }
            irReadings1.Enqueue(reading.InfraredReflectance1);
            movingAverage1 += reading.InfraredReflectance1;
            movingAverage1 /= irReadings1.Count;

            // --- IR channel 2 ---
            movingAverage2 *= irReadings2.Count;
            while (irReadings2.Count > 0 && irReadings2.Count >= frameRate * windowSize)
            {
                movingAverage2 -= irReadings2.Dequeue();
            }
            irReadings2.Enqueue(reading.InfraredReflectance2);
            movingAverage2 += reading.InfraredReflectance2;
            movingAverage2 /= irReadings2.Count;
        }
コード例 #7
0
        /// <summary>
        /// Feeds one IMU sample (accelerometer, gyroscope, magnetometer from
        /// sensor set 1) into the Madgwick AHRS filter and writes the updated
        /// orientation estimate back onto the reading's Orientation1.
        /// </summary>
        /// <param name="sensorReading">The sample to process; its Orientation1 is updated in place.</param>
        /// <param name="timestamp">Sample time in milliseconds; a negative value means
        /// "not supplied" and the internal stopwatch time is used instead.</param>
        /// <param name="secondary">Currently unused — the second-sensor path below is commented out.</param>
        public void UpdateWithReading(Sensors.Reading sensorReading, double timestamp = -1, bool secondary = false)
        {
            // The filter can diverge to NaN; recover by resetting the estimate.
            if (double.IsNaN(orientationEstimate.W) || double.IsNaN(orientationEstimate.X) || double.IsNaN(orientationEstimate.Y) || double.IsNaN(orientationEstimate.Z))
            {
                Reset();
            }
            if (!stopwatch.IsRunning)
            {
                stopwatch.Restart();
            }
            // Convert stopwatch ticks to milliseconds when no timestamp was given.
            if (timestamp < 0)
            {
                timestamp = (double)stopwatch.ElapsedTicks / (double)Stopwatch.Frequency * 1000.0;
            }
            SensorInfo info = new SensorInfo()
            {
                Reading = sensorReading, Timestamp = timestamp
            };

            latestReading = info;

            // NOTE(review): the stopwatch instance doubles as the lock object here
            // (a dedicated lock object would be more conventional). TryEnter means
            // a sample arriving while another update is in progress is skipped
            // rather than queued.
            if (Monitor.TryEnter(stopwatch))
            {
                // Estimate the instantaneous sample frequency (Hz) from the time
                // since the previous accepted reading; the filter needs it.
                double elapsedTime = (double)(info.Timestamp - lastReadingTimestamp);
                sampleFreq           = 1000.0f / elapsedTime;
                lastReadingTimestamp = info.Timestamp;
                double ax, ay, az, gx, gy, gz, mx, my, mz;
                //if (secondary)
                //{
                //    ax = sensorReading.Accelerometer2.X;
                //    ay = sensorReading.Accelerometer2.Y;
                //    az = sensorReading.Accelerometer2.Z;
                //    gx = sensorReading.Gyroscope2.X;
                //    gy = sensorReading.Gyroscope2.Y;
                //    gz = sensorReading.Gyroscope2.Z;
                //    mx = sensorReading.Magnetometer2.X;
                //    my = sensorReading.Magnetometer2.Y;
                //    mz = sensorReading.Magnetometer2.Z;
                //}
                //else
                {
                    ax = sensorReading.Accelerometer1.X;
                    ay = sensorReading.Accelerometer1.Y;
                    az = sensorReading.Accelerometer1.Z;
                    gx = sensorReading.Gyroscope1.X;
                    gy = sensorReading.Gyroscope1.Y;
                    gz = sensorReading.Gyroscope1.Z;
                    mx = sensorReading.Magnetometer1.X;
                    my = sensorReading.Magnetometer1.Y;
                    mz = sensorReading.Magnetometer1.Z;
                }

                // Gyroscope values are converted from degrees/s to radians/s;
                // accelerometer and magnetometer are passed through as-is.
                MadgwickAHRSupdate(gx * RADIANS_PER_DEGREE,
                                   gy * RADIANS_PER_DEGREE,
                                   gz * RADIANS_PER_DEGREE,
                                   ax,
                                   ay,
                                   az,
                                   mx,
                                   my,
                                   mz);

                //if (!secondary)
                sensorReading.Orientation1 = orientationEstimate;
                //else
                //    sensorReading.Orientation2 = orientationEstimate;

                Monitor.Exit(stopwatch);
            }
        }
コード例 #8
0
 /// <summary>
 /// Wraps a sensor reading as a loggable event of type "sensor_reading".
 /// </summary>
 /// <param name="reading">The sensor sample carried by this event.</param>
 public SensorReadingEvent(Sensors.Reading reading)
     : base()
 {
     type         = "sensor_reading";
     this.reading = reading;
 }