// Constructs the main control window for the full robot configuration.
//
// Order matters: each manager is created before anything that depends on it
// (sequenceProcessor needs servoManager/ariaManager/robotSpeech/robotEyes;
// kinectManager needs sequenceProcessor/personTracking/robotSpeech). Every
// hardware connection attempt is immediately reflected in its status TextBlock.
public ControlWindow()
        {
            InitializeComponent();

            // Servo controller: load configuration from file, connect, show result.
            servoManager = new ServoManager(servoConfigFileName);
            servoManager.ConnectToHardware();
            UpdateConnectedTextblock(servoManager.IsConnected(), servoHardwareState);

            // ARIA wheeled robot base.
            ariaManager = new AriaManager();
            ariaManager.InitializeAria();
            UpdateConnectedTextblock(ariaManager.IsConnected(), ariaHardwareState);

            // Speech output; takes the servo manager — presumably for mouth/servo
            // synchronization during speech (TODO confirm against RobotSpeech).
            robotSpeech = new RobotSpeech(servoManager);

            // LED eye hardware.
            robotEyes = new RobotEyes();
            robotEyes.InitializeHardware();

            // Person tracking drives both the head servos and the wheeled base.
            personTracking = new PersonTracking(servoManager, ariaManager);

            // Sequence playback engine, loaded from the sequence definition file.
            sequenceProcessor = new SequenceProcessor(servoManager, ariaManager, robotSpeech, robotEyes, sequenceFileName);

            // Kinect sensor feeds sequence triggering, person tracking, and speech.
            kinectManager = new KinectManager(sequenceProcessor, personTracking, robotSpeech);
            kinectManager.InitializeKinect();
            UpdateConnectedTextblock(kinectManager.IsConnected(), kinectHardwareState);
            UpdateMotionEnabledDisplay();

            // Refresh the on-screen log display every 500 ms.
            logUpdateTimer.Tick += new EventHandler(logUpdateTimer_Tick);
            logUpdateTimer.Interval = new TimeSpan(0, 0, 0, 0, 500);
            logUpdateTimer.Start();
        }
// ---- Example #2 (0 votes) — scraper artifact: the samples below are independent code snippets, not one compilable file ----
        // Constructs the main control window (alternate sample; functionally the
        // same initialization sequence as Example #1). Managers are created in
        // dependency order and each hardware connection result is shown in the UI.
        public ControlWindow()
        {
            InitializeComponent();

            // Servo controller: configure, connect, and display connection state.
            servoManager = new ServoManager(servoConfigFileName);
            servoManager.ConnectToHardware();
            UpdateConnectedTextblock(servoManager.IsConnected(), servoHardwareState);

            // ARIA wheeled robot base.
            ariaManager = new AriaManager();
            ariaManager.InitializeAria();
            UpdateConnectedTextblock(ariaManager.IsConnected(), ariaHardwareState);

            // Speech output (takes servoManager — TODO confirm why; likely mouth sync).
            robotSpeech = new RobotSpeech(servoManager);

            // LED eye hardware.
            robotEyes = new RobotEyes();
            robotEyes.InitializeHardware();

            // Person tracking uses both head servos and the wheeled base.
            personTracking = new PersonTracking(servoManager, ariaManager);

            // Sequence playback engine, loaded from the sequence definition file.
            sequenceProcessor = new SequenceProcessor(servoManager, ariaManager, robotSpeech, robotEyes, sequenceFileName);

            // Kinect sensor feeds sequence triggering, tracking, and speech.
            kinectManager = new KinectManager(sequenceProcessor, personTracking, robotSpeech);
            kinectManager.InitializeKinect();
            UpdateConnectedTextblock(kinectManager.IsConnected(), kinectHardwareState);
            UpdateMotionEnabledDisplay();

            // Refresh the on-screen log display every 500 ms.
            logUpdateTimer.Tick    += new EventHandler(logUpdateTimer_Tick);
            logUpdateTimer.Interval = new TimeSpan(0, 0, 0, 0, 500);
            logUpdateTimer.Start();
        }
        // Click handler for the "Initialize Hardware" button: re-attempts connection
        // to the servo controller and the Kinect and refreshes their status displays.
        // The servo configuration is reloaded first (second argument false —
        // presumably "do not reset positions"; TODO confirm against ServoManager).
        private void InitializeHardware_Click(object sender, RoutedEventArgs e)
        {
            servoManager.LoadServoConfiguration(servoConfigFileName, false);

            servoManager.ConnectToHardware();
            UpdateConnectedTextblock(servoManager.IsConnected(), servoHardwareState);

            kinectManager.InitializeKinect();
            UpdateConnectedTextblock(kinectManager.IsConnected(), kinectHardwareState);
        }
        // Performs the sequence by stepping through each frame and executing the required
        // voice synthesizer, delay, and motion actions.
        //
        // Runs on a dedicated sequence thread. Person tracking is suspended for the
        // duration; the try/finally guarantees tracking is re-enabled and the
        // sequenceIsRunning flag is cleared even if a frame action throws. Without
        // it, a single failure would leave tracking disabled and permanently block
        // any future sequence from starting.
        private void RunSequenceThread()
        {
            if (!servoManager.IsConnected())
            {
                ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Warning, "Servo hardware is disconnected, running sequence '" + runningSequence.name + "' in simulation mode.");
            }

            // Disable the person tracking feature while a sequence is executing.
            servoManager.PersonTrackingEnable(false);

            ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Info, "RunSequence(): sequence '" + runningSequence.name + "' started.");

            try
            {
                foreach (Frame frame in runningSequence.GetFrames())
                {
                    // Run the speech synthesizer asynchronously. Speaks while continuing to
                    // perform moves in this and subsequent frames.
                    if (frame.speechString != null)
                    {
                        speechSynthesizer.SpeakAsync(frame.speechString);
                    }

                    // Advance the PowerPoint presentation, if one is active.
                    if (frame.pptIndex != null && pptController.IsPptActive())
                    {
                        pptController.GoToSlide(frame.pptIndex);
                    }

                    // Wait for the specified amount of time (frame.delay is in seconds).
                    if (frame.delay > 0)
                    {
                        Thread.Sleep((int)(frame.delay * 1000));
                    }

                    // Move all servos in the frame list.
                    if (frame.GetServoPositions().Count > 0)
                    {
                        servoManager.MoveServos(frame.GetServoPositions(), frame.timeToDestination);
                    }
                }

                ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Info, "RunSequence(): sequence '" + runningSequence.name + "' completed.");
            }
            finally
            {
                // Always re-enable person tracking and clear the running flag,
                // even if a frame action failed part-way through the sequence.
                servoManager.PersonTrackingEnable(true);

                lock (sequenceLock)
                {
                    sequenceIsRunning = false;
                }
            }
        }
        // Constructs the control window for the minimal configuration (servos,
        // sequence processor, and Kinect only — no robot base, speech, or eyes).
        public ControlWindow()
        {
            InitializeComponent();

            // Servo controller: configure, connect, and display connection state.
            servoManager = new ServoManager(servoConfigFileName);
            servoManager.ConnectToHardware();
            UpdateConnectedTextblock(servoManager.IsConnected(), servoHardwareState);

            // Sequence playback engine, loaded from the sequence definition file.
            sequenceProcessor = new SequenceProcessor(servoManager, sequenceFileName);

            // Kinect sensor triggers sequence playback.
            kinectManager = new KinectManager(sequenceProcessor);
            kinectManager.InitializeKinect();
            UpdateConnectedTextblock(kinectManager.IsConnected(), kinectHardwareState);

            // Refresh the on-screen log display every 500 ms.
            logUpdateTimer.Tick    += new EventHandler(logUpdateTimer_Tick);
            logUpdateTimer.Interval = new TimeSpan(0, 0, 0, 0, 500);
            logUpdateTimer.Start();
        }
        // Constructs the control window for the minimal configuration (alternate
        // sample; identical logic to the previous snippet apart from whitespace).
        public ControlWindow()
        {
            InitializeComponent();

            // Servo controller: configure, connect, and display connection state.
            servoManager = new ServoManager(servoConfigFileName);
            servoManager.ConnectToHardware();
            UpdateConnectedTextblock(servoManager.IsConnected(), servoHardwareState);

            // Sequence playback engine, loaded from the sequence definition file.
            sequenceProcessor = new SequenceProcessor(servoManager, sequenceFileName);

            // Kinect sensor triggers sequence playback.
            kinectManager = new KinectManager(sequenceProcessor);
            kinectManager.InitializeKinect();
            UpdateConnectedTextblock(kinectManager.IsConnected(), kinectHardwareState);

            // Refresh the on-screen log display every 500 ms.
            logUpdateTimer.Tick += new EventHandler(logUpdateTimer_Tick);
            logUpdateTimer.Interval = new TimeSpan(0, 0, 0, 0, 500);
            logUpdateTimer.Start();
        }
// ---- Example #7 (0 votes) — scraper artifact separating independent code samples ----
        // Performs the sequence by stepping through each frame and executing the required
        // voice synthesizer, delay, and motion actions.
        //
        // Runs on a dedicated sequence thread. The try/finally guarantees the
        // sequenceIsRunning flag is cleared even if a frame action throws; without
        // it, one failure would permanently block new sequences from starting.
        private void RunSequenceThread()
        {
            if (!servoManager.IsConnected())
            {
                ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Warning, "Servo hardware is disconnected, running servos in simulation mode for sequence '" + runningSequence.name + "'.");
            }

            if (!ariaManager.IsConnected())
            {
                ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Warning, "Robot base hardware is disconnected, running robot base in simulation mode for sequence '" + runningSequence.name + "'.");
            }

            ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Info, "RunSequenceThread(): sequence '" + runningSequence.name + "' started.");

            try
            {
                foreach (Frame frame in runningSequence.GetFrames())
                {
                    // Run the speech synthesizer asynchronously. Speaks while continuing to
                    // perform moves in this and subsequent frames.
                    if (frame.speechString != null)
                    {
                        robotSpeech.Speak(frame.speechString);
                    }

                    // Set the state of the LED eyes. Unrecognized eyeState strings
                    // are deliberately ignored (eyes keep their previous state).
                    if (frame.eyeState != null)
                    {
                        switch (frame.eyeState)
                        {
                        case "Open":
                            robotEyes.SetEyeState(true, true);
                            break;

                        case "Closed":
                            robotEyes.SetEyeState(false, false);
                            break;

                        case "LeftClosed":
                            robotEyes.SetEyeState(false, true);
                            break;

                        case "RightClosed":
                            robotEyes.SetEyeState(true, false);
                            break;
                        }
                    }

                    // Wait for the specified amount of time (frame.delay is in seconds).
                    if (frame.delay > 0)
                    {
                        Thread.Sleep((int)(frame.delay * 1000));
                    }

                    // Servo motion and wheel motion are performed simultaneously by creating
                    // two separate threads and waiting until both have signaled completion.
                    ManualResetEvent[] manualEvents = new ManualResetEvent[]
                    {
                        new ManualResetEvent(false),
                        new ManualResetEvent(false)
                    };

                    // Start servo motion thread.
                    Thread servoThread = new Thread(new ParameterizedThreadStart(MoveServos));
                    servoThread.Start(new SequenceThreadData(frame, manualEvents[0]));

                    // Start wheel motion thread.
                    Thread wheelsThread = new Thread(new ParameterizedThreadStart(MoveWheels));
                    wheelsThread.Start(new SequenceThreadData(frame, manualEvents[1]));

                    // Block until both servo motion and wheel motion have completed.
                    WaitHandle.WaitAll(manualEvents);
                }

                ErrorLogging.AddMessage(ErrorLogging.LoggingLevel.Info, "RunSequenceThread(): sequence '" + runningSequence.name + "' completed.");
            }
            finally
            {
                // Always clear the running flag so future sequences can start,
                // even if this sequence failed part-way through.
                lock (sequenceLock)
                {
                    sequenceIsRunning = false;
                }
            }
        }
// ---- Example #8 (0 votes) — scraper artifact separating independent code samples ----
        // Moves the robot's head/eyes to track the specified skeleton joint.
        //
        // Thread-safe and rate-limited: at most one update runs at a time
        // (isAlreadyUpdatingTracking guard) and updates are throttled to one per
        // 100 ms. The try/finally guarantees the guard is cleared even if a
        // hardware call throws; without it, a single failure would silently
        // disable person tracking until restart.
        public void UpdatePosition(SkeletonPoint targetPosition)
        {
            // Only update if tracking is allowed and if we're not already updating in a
            // different thread.
            lock (personTrackingLock)
            {
                if (!trackingEnabled || isAlreadyUpdatingTracking)
                {
                    return;
                }

                isAlreadyUpdatingTracking = true;
            }

            try
            {
                // Only update once every 100ms, even if we're receiving new joint
                // positions more frequently than this.
                TimeSpan timeBetweenUpdates = new TimeSpan(0, 0, 0, 0, 100);

                if (DateTime.Now - lastUpdateTime < timeBetweenUpdates)
                {
                    return;
                }

                lastUpdateTime = DateTime.Now;

                // Position of the robot neck and eye servos at which the robot faces straight forward.
                const double servoCenterPosition_HeadHorizontal = 1550;
                const double servoCenterPosition_HeadVertical   = 2000;
                const double servoCenterPosition_Eyes           = 1600;

                const double servoOffset_HeadVertical = 0;

                // Number of servo position increments per radian of rotation for each of the servos.
                const double servoIncrementsPerRadian_HeadHorizontal = 800;
                const double servoIncrementsPerRadian_HeadVertical   = 1000;
                const double servoIncrementsPerRadian_Eyes           = 800;

                // Wheeled base tracking parameters: rotation speed and the minimum
                // horizontal angle (radians) before the base starts turning.
                const double baseRotationSpeed = 5;
                const double trackingThreshold = 0.1;

                if (servoManager.IsConnected())
                {
                    servoManager.UpdateServoValues();
                }

                // Angle to the target joint relative to straight ahead. Assumes
                // Kinect camera-space coordinates (X right, Y up, Z forward) —
                // TODO confirm Z is always positive for tracked joints.
                double targetAngle_Horizontal = Math.Atan(targetPosition.X / targetPosition.Z);
                double targetAngle_Vertical   = Math.Atan(targetPosition.Y / targetPosition.Z);

                // New head position to face the target joint (head at center reference).
                double newServoPosition_HeadHorizontal = servoCenterPosition_HeadHorizontal - targetAngle_Horizontal * servoIncrementsPerRadian_HeadHorizontal;
                double newServoPosition_HeadVertical   = servoCenterPosition_HeadVertical + servoOffset_HeadVertical - targetAngle_Vertical * servoIncrementsPerRadian_HeadVertical;

                // Eye position with head at center.
                double newServoPosition_Eyes = servoCenterPosition_Eyes + targetAngle_Horizontal * servoIncrementsPerRadian_Eyes;

                if (servoManager.IsConnected())
                {
                    // NOTE(review): head/eye servo motion is currently disabled —
                    // the computed positions above are unused until one of these
                    // alternatives is re-enabled. Tracking speed per servo was
                    // 30/30/1000 (horizontal/vertical/eyes).
                    //
                    // Head motion:
                    //servoManager.StartSpeedMove(new ServoPosition(10, newServoPosition_HeadHorizontal), 30);
                    //servoManager.StartSpeedMove(new ServoPosition(9, newServoPosition_HeadVertical), 30);
                    //
                    // Eye-only motion:
                    //StartSpeedMove(new ServoPosition(10, servoCenterPosition_HeadHorizontal), 30);
                    //StartSpeedMove(new ServoPosition(9, servoCenterPosition_HeadVertical), 30);
                    //StartSpeedMove(new ServoPosition(8, newServoPosition_Eyes), 1000);
                }

                if (ariaManager.IsConnected())
                {
                    if (Math.Abs(targetAngle_Horizontal) > trackingThreshold)
                    {
                        // Rotate the base toward the target at a fixed speed,
                        // signed by which side the target is on.
                        double trackingVelocity =
                            targetAngle_Horizontal > 0 ? baseRotationSpeed : -baseRotationSpeed;

                        ariaManager.SetPersonTrackingRotation(trackingVelocity);

                        // Restart watchdog timer so rotation stops if updates cease.
                        wheelWatchdogTimer.Stop();
                        wheelWatchdogTimer.Start();
                    }
                    else
                    {
                        // Target is within the dead band: stop rotating.
                        ariaManager.SetPersonTrackingRotation(0);
                        wheelWatchdogTimer.Stop();
                    }
                }
            }
            finally
            {
                // Always release the re-entrancy guard, including on the 100 ms
                // throttle early-return and on any exception above.
                lock (personTrackingLock)
                {
                    isAlreadyUpdatingTracking = false;
                }
            }
        }