/// <summary>
/// Loads the circle gesture knowledge base, wires the detector's trace and
/// detection handler, and binds the learned paths to the templates list.
/// </summary>
void LoadCircleGestureDetector()
{
    // OpenOrCreate: an empty knowledge base is created on first run.
    // The detector loads its templates in the constructor, so the stream
    // must be disposed afterwards — the original leaked this file handle
    // (compare the using-pattern in the sibling Load*GestureDetector methods).
    using (Stream recordStream = File.Open(circleKBPath, FileMode.OpenOrCreate))
    {
        circleGestureRecognizer = new TemplatedGestureDetector("Circle", recordStream);
        circleGestureRecognizer.TraceTo(gesturesCanvas, Colors.Red);
        circleGestureRecognizer.OnGestureDetected += OnGestureDetected;
        templates.ItemsSource = circleGestureRecognizer.LearningMachine.Paths;
    }
}
/// <summary>
/// Loads the star gesture knowledge base and wires the detector's trace
/// and detection handler.
/// </summary>
void LoadStarGestureDetector()
{
    // Dispose the knowledge-base stream once the detector has loaded it;
    // the original code leaked this file handle. (Dead commented-out
    // templates.ItemsSource line removed.)
    using (Stream recordStream = File.Open(starKBPath, FileMode.OpenOrCreate))
    {
        starGestureRecognizer = new TemplatedGestureDetector("Star", recordStream);
        starGestureRecognizer.TraceTo(gesturesCanvas, Colors.Blue);
        starGestureRecognizer.OnGestureDetected += OnGestureDetected;
    }
}
// New gesture, step 7: loader for the pigtail gesture detector.
/// <summary>
/// Loads the pigtail gesture knowledge base and wires the detector's trace
/// and detection handler.
/// </summary>
void LoadPigtailGestureDetector()
{
    // Dispose the knowledge-base stream once the detector has loaded it;
    // the original code leaked this file handle.
    using (Stream recordStream = File.Open(pigtailKBPath, FileMode.OpenOrCreate))
    {
        pigtailGestureRecognizer = new TemplatedGestureDetector("Pigtail", recordStream);
        pigtailGestureRecognizer.TraceTo(gesturesCanvas, Colors.Pink);
        pigtailGestureRecognizer.OnGestureDetected += OnGestureDetected;
    }
}
/// <summary>
/// Loads the check-mark gesture knowledge base and wires the detector's
/// trace and detection handler.
/// </summary>
void LoadCheckGestureDetector()
{
    // Dispose the knowledge-base stream once the detector has loaded it;
    // the original code leaked this file handle.
    using (Stream recordStream = File.Open(checkKBPath, FileMode.OpenOrCreate))
    {
        checkGestureRecognizer = new TemplatedGestureDetector("Check", recordStream);
        checkGestureRecognizer.TraceTo(gesturesCanvas, Colors.Green);
        checkGestureRecognizer.OnGestureDetected += OnGestureDetected;
    }
}
/// <summary>
/// Initializes the circle gesture detector from its knowledge-base file,
/// attaches it to the display canvas, and registers it as the mouse
/// controller's click gesture.
/// </summary>
void LoadCircleGestureDetector()
{
    // The stream is only needed while the detector loads its templates.
    using (Stream kbStream = File.Open(circleKBPath, FileMode.OpenOrCreate))
    {
        var detector = new TemplatedGestureDetector("Circle", kbStream);
        detector.DisplayCanvas = gesturesCanvas;
        detector.OnGestureDetected += OnGestureDetected;

        circleGestureRecognizer = detector;
        MouseController.Current.ClickGestureDetector = circleGestureRecognizer;
    }
}
/// <summary>
/// Registers this gesture event with the Kinect manager and builds its
/// detector from a gesture template stored as an embedded resource.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when the embedded gesture-template resource cannot be found.
/// </exception>
public override void OnInitialize()
{
    manager = GetManager<KinectManager>();
    manager.AddGestureEvent(this);

    // GetManifestResourceStream returns null when the resource is missing;
    // fail fast with a descriptive message instead of a NullReferenceException
    // deep inside the detector's constructor (resolves the original TODO).
    Stream gestureStream = typeof(GestureEvent).Assembly.GetManifestResourceStream(folder + GestureName + extention);
    if (gestureStream == null)
    {
        throw new InvalidOperationException(
            "Gesture template resource not found: " + folder + GestureName + extention);
    }

    GestureDetector = new TemplatedGestureDetector(GestureName, gestureStream);
    GestureDetector.MinimalPeriodBetweenGestures = 0;
    GestureDetector.OnGestureDetected += detectedGesture;
}
/// <summary>
/// Wires up the posture, swipe, and circle detectors used by this listener.
/// </summary>
public KinectListener()
{
    this.circleTemplate = Path.Combine(Environment.CurrentDirectory, "circle.tpl");

    this.posture = new AlgorithmicPostureDetector();
    this.posture.PostureDetected += Posture_PostureDetected;

    this.swipe = new SwipeGestureDetector();
    this.swipe.OnGestureDetected += Swipe_OnGestureDetected;

    // The template stream is only needed while the circle detector loads
    // its knowledge base, so it is released as soon as construction is done.
    using (Stream templateStream = File.Open(this.circleTemplate, FileMode.OpenOrCreate))
    {
        this.circle = new TemplatedGestureDetector("Circle", templateStream);
        this.circle.OnGestureDetected += Circle_OnGestureDetected;
    }
}
/// <summary>
/// Initializes the circle detector with knowledge bases for clockwise and
/// counter-clockwise motions.
/// </summary>
public CircleGestureDetector()
{
    // Initialize the paths for the two knowledge bases.
    clockwisePath = System.IO.Path.Combine(Environment.CurrentDirectory, @"data\clockwise.save");
    counterClockwisePath = System.IO.Path.Combine(Environment.CurrentDirectory, @"data\counterClockwise.save");

    // The detectors load their knowledge bases during construction, so both
    // streams can be closed right afterwards — the original leaked both
    // file handles by never disposing them.
    using (Stream clockwiseStream = File.Open(clockwisePath, FileMode.OpenOrCreate))
    using (Stream counterClockwiseStream = File.Open(counterClockwisePath, FileMode.OpenOrCreate))
    {
        _clockwiseDetector = new TemplatedGestureDetector(KnownGestures.Clockwise.ToString(), clockwiseStream);
        _counterClockwiseDetector = new TemplatedGestureDetector(KnownGestures.CounterClockwise.ToString(), counterClockwiseStream);
    }

    // Associate gesture detection with the handler in this class.
    _clockwiseDetector.OnGestureDetected += gestureDetected;
    _counterClockwiseDetector.OnGestureDetected += gestureDetected;
}
// Called from Initialize when a Kinect sensor is connected; builds every
// gesture detector from its knowledge-base file(s).
/// <summary>
/// Initializes the circle detector and all angular gesture detectors
/// (hip abduction/extension, knee extension, stand, sit).
/// </summary>
void LoadGestureDetector()
{
    // The circle detector is template-only (no angular knowledge base).
    using (Stream recordStream = File.Open(circleKBPath, FileMode.OpenOrCreate))
    {
        circleGestureRecognizer = new TemplatedGestureDetector("Circle", recordStream);
        circleGestureRecognizer.OnGestureDetected += OnGestureDetected;
    }

    // Every remaining detector follows the same pattern (positional KB +
    // angular KB, traced onto gesturesCanvas), so the repeated setup is
    // factored into CreateAngularDetector. The superseded commented-out
    // TemplatedGestureDetector variants have been removed.
    leftHipAbductionGestureRecognizer = CreateAngularDetector("LeftHipAbduction", leftHipAbductionKBPath, leftHipAbductionAngleKBPath);
    rightHipAbductionGestureRecognizer = CreateAngularDetector("RightHipAbduction", rightHipAbductionKBPath, rightHipAbductionAngleKBPath);
    leftHipExtensionGestureRecognizer = CreateAngularDetector("LeftHipExtension", leftHipExtensionKBPath, leftHipExtensionAngleKBPath);
    rightHipExtensionGestureRecognizer = CreateAngularDetector("RightHipExtension", rightHipExtensionKBPath, rightHipExtensionAngleKBPath);
    leftKneeExtensionGestureRecognizer = CreateAngularDetector("LeftKneeExtension", leftKneeExtensionKBPath, leftKneeExtensionAngleKBPath);
    rightKneeExtensionGestureRecognizer = CreateAngularDetector("RightKneeExtension", rightKneeExtensionKBPath, rightKneeExtensionAngleKBPath);
    standGestureRecognizer = CreateAngularDetector("Stand", standKBPath, standAngleKBPath);
    sitGestureRecognizer = CreateAngularDetector("Sit", sitKBPath, sitAngleKBPath);
}

/// <summary>
/// Opens the positional and angular knowledge bases for one gesture, builds
/// an <see cref="AngularTemplatedGestureDetector"/>, and wires its display
/// canvas and detection handler.
/// </summary>
/// <param name="gestureName">Name passed to the detector (and reported on detection).</param>
/// <param name="kbPath">Path of the positional knowledge-base file.</param>
/// <param name="angleKBPath">Path of the angular knowledge-base file.</param>
/// <returns>The fully wired detector.</returns>
private AngularTemplatedGestureDetector CreateAngularDetector(string gestureName, string kbPath, string angleKBPath)
{
    // OpenOrCreate: missing files yield empty knowledge bases on first run.
    using (Stream recordStream = File.Open(kbPath, FileMode.OpenOrCreate))
    using (Stream angleRecordStream = File.Open(angleKBPath, FileMode.OpenOrCreate))
    {
        AngularTemplatedGestureDetector detector = new AngularTemplatedGestureDetector(gestureName, recordStream, angleRecordStream);
        detector.DisplayCanvas = gesturesCanvas;
        detector.OnGestureDetected += OnGestureDetected;
        return detector;
    }
}
// Gestures
/// <summary>
/// Creates the swipe and circle gesture detectors and hooks their
/// detection events.
/// </summary>
private void InitializeGestures()
{
    swipeGestureRecognizer = new SwipeGestureDetector();
    swipeGestureRecognizer.OnGestureDetected += OnGestureDetected;

    // OpenOrCreate (instead of Open) avoids a FileNotFoundException on a
    // machine where the knowledge base has not been recorded yet; an empty
    // file simply yields a detector with no learned templates.
    using (Stream recordStream = File.Open("../../circleKB.save", FileMode.OpenOrCreate))
    {
        circleGestureRecognizer = new TemplatedGestureDetector("Circle", recordStream);
        circleGestureRecognizer.OnGestureDetected += OnGestureDetected;
    }
}
/// <summary>
/// Called at the start when the window is loaded.
/// Builds the circle and swipe gesture detectors, combines them, locates the
/// first connected Kinect sensor, enables its skeleton and color streams,
/// starts the sensor, and finally starts speech recognition.
/// </summary>
private void InitializeKinect()
{
    // Load the circle template knowledge base; the stream is only needed
    // while the detector is being constructed.
    using (Stream recordStream = File.Open(@"circleKB.save", FileMode.OpenOrCreate))
    {
        this.circleDetector = new TemplatedGestureDetector("Circle", recordStream);
        this.circleDetector.DisplayCanvas = videoCanvas;
        this.circleDetector.OnGestureDetected += OnHandGesture;
    }

    this.gestureDetector = new SwipeGestureDetector();
    this.gestureDetector.DisplayCanvas = videoCanvas;
    this.gestureDetector.OnGestureDetected += OnHandGesture;

    // Run both detectors side by side on the same skeleton data.
    // NOTE(review): the combined detector is a local variable and is never
    // stored in a field — confirm it stays reachable via its subscriptions
    // and is actually fed with skeleton frames.
    ParallelCombinedGestureDetector parallelCombinedGestureDetector = new ParallelCombinedGestureDetector();
    parallelCombinedGestureDetector.OnGestureDetected += OnHandGesture;
    parallelCombinedGestureDetector.DisplayCanvas = videoCanvas;
    parallelCombinedGestureDetector.Add(circleDetector);
    parallelCombinedGestureDetector.Add(gestureDetector);

    // Use the first sensor that reports a Connected status.
    foreach (var potentialSensor in KinectSensor.KinectSensors)
    {
        if (potentialSensor.Status == KinectStatus.Connected)
        {
            this.kinectSensor = potentialSensor;
            break;
        }
    }

    if (null != this.kinectSensor)
    {
        // Turning on skeleton stream
        this.kinectSensor.SkeletonStream.Enable();
        this.kinectSensor.SkeletonFrameReady += this.SensorSkeletonFrameReady;

        // Turn on the color stream to receive color frames
        this.kinectSensor.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);

        // Allocate space to put the pixels we'll receive
        this.colorPixels = new byte[this.kinectSensor.ColorStream.FramePixelDataLength];

        // This is the bitmap we'll display on-screen
        this.colorBitmap = new WriteableBitmap(this.kinectSensor.ColorStream.FrameWidth, this.kinectSensor.ColorStream.FrameHeight, 96.0, 96.0, PixelFormats.Bgr32, null);

        // Set the image we display to point to the bitmap where we'll put the image data
        this.Image.Source = this.colorBitmap;

        // Add an event handler to be called whenever there is new color frame data
        this.kinectSensor.ColorFrameReady += this.SensorColorFrameReady;

        this.kinectSensor.Start();
    }

    // No connected sensor was found: skip speech setup entirely.
    if (null == this.kinectSensor)
    {
        // Connection is failed
        return;
    }

    this.speech = new Speech(this.kinectSensor, grammar, this);
    this.speech.Start();
}