/// <summary>
/// Convert raw skeletal structure into one we can visualize (with window coordinates) and process in Behavior.
/// The result is a list of seven VisualizableSkeletonInformation objects, some having IsSkeletonActive=true and other properties filled.
/// The active skeleton with the smallest DistanceMeters (Z of the joint of interest) is marked IsMainSkeleton,
/// and the corresponding HumanInteractionState gets IsMain=true.
/// </summary>
/// <returns>CCR Iterator</returns>
public IEnumerator<ITask> ProcessSkeletons()
{
    // while calculating the frame, we operate on a freshly allocated list of seven skeletons:
    AllocateSkeletonsTemp();

    int skeletonIndex = 0;
    int mainSkeletonIndex = -1;               // index of the closest active skeleton; -1 while none found
    double minSkelDistance = double.MaxValue;

    foreach (Skeleton skel in this.RawFrames.RawSkeletonFrameData.SkeletonData)
    {
        VisualizableSkeletonInformation vsi = this.SkeletonsTemp[skeletonIndex];
        vsi.IsSkeletonActive = false;
        vsi.SkeletonQuality = string.Empty;
        vsi.SkeletonPose = SkeletonPose.None;

        // skeleton is tracked, head and jointTypeOfInterest clearly visible:
        if (SkeletonTrackingState.Tracked == skel.TrackingState
            && skel.Joints[JointType.Head].TrackingState == JointTrackingState.Tracked
            && skel.Joints[jointTypeOfInterest].TrackingState == JointTrackingState.Tracked)
        {
            vsi.IsSkeletonActive = true;
            vsi.SkeletonQuality = skel.ClippedEdges.ToString();
            vsi.TrackingId = skel.TrackingId;

            // see http://msdn.microsoft.com/en-us/library/jj131025.aspx#Active_User_Tracking
            if (skel.Joints[JointType.FootLeft].TrackingState == JointTrackingState.Tracked
                && skel.Joints[JointType.FootRight].TrackingState == JointTrackingState.Tracked)
            {
                // head-to-average-foot height:
                vsi.SkeletonSizeMeters = skel.Joints[JointType.Head].Position.Y
                    - (skel.Joints[JointType.FootLeft].Position.Y + skel.Joints[JointType.FootRight].Position.Y) / 2.0d
                    + 0.08d; // plus head diameter
            }

            vsi.detectSkeletonPose(skel); // fills vsi.SkeletonPose

            // Populate joint points and compute Pan, Tilt and DistanceMeters:
            foreach (Joint joint in skel.Joints)
            {
                // JointToPointCoordinates leaves its result in this.cachedJointPoint:
                yield return (new IterativeTask<Joint>(joint, this.JointToPointCoordinates));

                // BUGFIX: the original assigned JointCoordinates and TrackingState twice -
                // once through the indexer and again through the alias vj (the same object
                // reference). The redundant first pair of assignments is removed:
                VisualizableJoint vj = vsi.JointPoints[joint.JointType];
                vj.JointCoordinates = this.cachedJointPoint;
                vj.TrackingState = joint.TrackingState;
                vj.X = skel.Joints[joint.JointType].Position.X / KinectNykoGlassesFactor;
                vj.Y = skel.Joints[joint.JointType].Position.Y / KinectNykoGlassesFactor;
                vj.Z = skel.Joints[joint.JointType].Position.Z / KinectNykoGlassesFactor;
                vj.ComputePanTilt();

                if (joint.JointType == jointTypeOfInterest)
                {
                    vj.IsJointOfInterest = true;
                    vsi.DistanceMeters = vj.Z;

                    // remember the closest skeleton - it becomes the "main" one after the loop:
                    if (vsi.DistanceMeters < minSkelDistance)
                    {
                        minSkelDistance = vsi.DistanceMeters;
                        mainSkeletonIndex = skeletonIndex;
                    }
                }
            }

            // mirror the tracked skeleton into the interaction state for Behavior consumption:
            if (skeletonIndex < _state.HumanInteractionStates.Length)
            {
                HumanInteractionState his = _state.HumanInteractionStates[skeletonIndex];
                VisualizableJoint jointOfInterest = vsi.JointPoints[jointTypeOfInterest];
                his.IsTracked = true;
                his.TrackingId = skel.TrackingId;
                his.IsMain = false;   // IsMain is decided after all skeletons are seen
                his.TimeStamp = DateTime.Now;
                his.DirectionPan = jointOfInterest.Pan;
                his.DirectionTilt = jointOfInterest.Tilt;
                his.DistanceMeters = vsi.DistanceMeters;
            }
        }
        else
        {
            // skeleton not (fully) tracked - mark the corresponding interaction state inactive:
            if (skeletonIndex < _state.HumanInteractionStates.Length)
            {
                HumanInteractionState his = _state.HumanInteractionStates[skeletonIndex];
                his.IsTracked = false;
                his.IsMain = false;
            }
        }
        skeletonIndex++;
    }

    // mark the closest active skeleton as "main":
    if (mainSkeletonIndex >= 0)
    {
        VisualizableSkeletonInformation vsi = this.SkeletonsTemp[mainSkeletonIndex];
        vsi.IsMainSkeleton = true;

        if (mainSkeletonIndex < _state.HumanInteractionStates.Length)
        {
            HumanInteractionState his = _state.HumanInteractionStates[mainSkeletonIndex];
            his.IsMain = true;
        }
    }

    // make the result available for outside consumption:
    AllSkeletons = SkeletonsTemp;

    yield break;
}
/// <summary>
/// One cycle of the person-following behavior. Target priority:
/// (1) a tracked Kinect skeleton (closest by the target joint's Z), (2) a recent "red shirt"
/// color target from video surveillance, (3) a voice-localization fallback window.
/// When no target remains, the robot is stopped and the "lost target" routine
/// (a timed Kinect-platform sweep, then a waiting search) takes over.
/// Side effects: pan/tilt and platform commands, lights, speech, photos, and
/// _mapperVicinity.robotState tactics flags.
/// NOTE(review): order-sensitive state-machine code - documented in place, not restructured.
/// </summary>
private void StrategyPersonFollowing()
{
    haveATargetNow = false;
    bool haveSkeleton = false;
    bool lostSkeletons = false;
    DateTime Now = DateTime.Now;

    //kinect.JointType targetJointType = kinect.JointType.HandLeft;
    kinect.JointType targetJointType = kinect.JointType.Spine;

    setCurrentGoalDistance(null); // measured value, best case is distance to skeleton, can be null if we completely lost target, or assumed to be 5 meters for red shirt.

    FollowDirectionTargetDistanceToGoalMeters = TargetDistanceToGoalMeters; // desired value. We want to stop at this distance to human and keep him in front of the robot.

    SetLightsTrackingSkeleton(false);
    SetLightsTrackingRedShirt(false);

    if (!_mapperVicinity.robotState.ignoreKinectSkeletons)
    {
        var tmpAllSkeletons = frameProcessor.AllSkeletons; // get a snapshot of the pointer to allocated array, and then take sweet time processing it knowing it will not change

        // deferred LINQ query over the snapshot; enumerated below by Count() and FirstOrDefault():
        var skels = from s in tmpAllSkeletons
                    where s.IsSkeletonActive && s.JointPoints[targetJointType].TrackingState == kinect.JointTrackingState.Tracked
                    orderby s.JointPoints[targetJointType].Z
                    select s;

        int skelsCount = skels.Count();

        if (skelsCount != skelsCountPrev)
        {
            // the number of visible skeletons changed since the last cycle.
            // deltaSkelsCount is only consumed by the commented-out announcements below:
            int deltaSkelsCount = skelsCount - skelsCountPrev;

            skelsCountPrev = skelsCount;

            //if (deltaSkelsCount < 0)
            //{
            //    if ((Now - lastAmazing).TotalSeconds > 10.0d)
            //    {
            //        lastAmazing = Now;
            //        _soundsHelper.PlaySound("you were amazing", 0.5d);
            //    }
            //}
            //else
            //{
            //    _soundsHelper.PlaySound("skeletons number changed", 0.2d);
            //}

            //talkerToHuman.ensureAnnouncementDelay();

            if (skelsCount > 0)
            {
                frameProcessor.doSaveOneImage = _mapperVicinity.robotState.doPhotos; // snap a picture
                //_mainWindow.PlayRandomSound();
                //talkerToHuman.Say(9, "" + skelsCount + " tasty human" + (skelsCount > 1 ? "s" : ""));
                HeadlightsOff();
            }
            else
            {
                lostSkeletons = true;
            }
        }

        if (skelsCount > 0)
        {
            haveSkeleton = true;

            #region Have a skeleton, follow it

            lastHadSkeletons = Now;

            // found the first skeleton; track it:
            VisualizableSkeletonInformation vsi = skels.FirstOrDefault();

            if (vsi == null)
            {
                // this really, really should not happen, especially now when we allocate frameProcessor.AllSkeletons for every frame.
                Tracer.Error("StrategyPersonFollowing() vsi == null");
                return;
            }

            VisualizableJoint targetJoint = vsi.JointPoints[targetJointType];
            //bool isSkeletonActive = vsi.IsSkeletonActive;   always true
            SkeletonPose skeletonPose = vsi.SkeletonPose;

            // when changed, announce pose and react to it:
            ReactOnSkeletonPose(skeletonPose);

            // Warning: VisualizableJoint::ComputePanTilt() can set Pan or Tilt to NaN
            if (targetJoint != null && !double.IsNaN(targetJoint.Pan) && !double.IsNaN(targetJoint.Tilt))
            {
                haveATargetNow = true;
                SetLightsTrackingSkeleton(true);

                double targetPanRelativeToRobot = _state.currentPanKinect + targetJoint.Pan;
                double targetPanRelativeToHead = targetJoint.Pan;

                //Tracer.Trace("================== currentPanKinect=" + _state.currentPanKinect + " targetJoint.Pan=" + targetJoint.Pan + " targetPanRelativeToRobot=" + targetPanRelativeToRobot);

                // guns rotate (pan) with Kinect, but tilt independently of Kinect. They are calibrated when Kinect tilt = 0
                targetPan = targetPanRelativeToHead;
                targetTilt = targetJoint.Tilt + _state.currentTiltKinect;

                double kinectTurnEstimate = targetPanRelativeToRobot - _state.currentPanKinect;
                bool shouldTurnKinect = Math.Abs(kinectTurnEstimate) > smallMovementsAngleTreshold; // don't follow small movements

                SetDesiredKinectPlatformPan(shouldTurnKinect ? (double?)targetPanRelativeToRobot : null); // will be processed in computeAndExecuteKinectPlatformTurn() when head turn measurement comes.

                setPanTilt(targetPan, targetTilt);

                double distanceToHumanMeters = targetJoint.Z; // actual distance from Kinect to human

                bool tooCloseToHuman = distanceToHumanMeters < TargetDistanceToGoalMeters - 0.1d; // cannot shoot, likely backing up
                bool veryCloseToHuman = distanceToHumanMeters < TargetDistanceToGoalMeters + 0.1d; // can talk to human, likely in the dead zone and not moving

                #region Greet the Human

                if (veryCloseToHuman && talkerToHuman.canTalk())
                {
                    frameProcessor.doSaveOneImage = _mapperVicinity.robotState.doPhotos; // snap a picture
                    talkerToHuman.TalkToHuman();
                }

                #endregion // Greet the Human

                #region Shoot the Human

                if (skeletonPose == shootingPose)
                {
                    if (!tooCloseToHuman)
                    {
                        //lock (shootingPoseLock)
                        //{
                        // fire at most once per pose, and not more often than every 2 seconds:
                        if (!shotAtHuman && (Now - lastShotAtHuman).TotalSeconds > 2.0d)
                        {
                            lastShotAtHuman = Now;
                            shotAtHuman = true;
                            talkerToHuman.Say(9, "good boy");
                            SpawnIterator(ShootGunOnce);
                        }
                        //}
                    }
                }
                else
                {
                    shotAtHuman = false; // re-arm once the pose is dropped
                }

                #endregion // Shoot the Human

                ComputeMovingVelocity(distanceToHumanMeters, targetPanRelativeToRobot, 0.25d, 10.0d);
            }
            // else
            // {
            //     // we have skeleton(s) but the target joint is not visible. What to do here?
            // }

            #endregion // Have a skeleton, follow it
        }
        else if ((Now - lastHadSkeletons).TotalSeconds < 1.0d)
        {
            return; // may be just temporary loss of skeletons, wait a little before switching to red shirt
        }
    } // end ignoreKinectSkeletons

    if (!_mapperVicinity.robotState.ignoreRedShirt && !haveSkeleton && frameProcessor.videoSurveillanceDecider != null)
    {
        #region Have a red shirt, follow it

        VideoSurveillanceTarget target = frameProcessor.videoSurveillanceDecider.mainColorTarget;

        if (target != null && (Now - target.TimeStamp).TotalSeconds < 0.5d) // must also be recent
        {
            lastHadRedShirt = Now;
            haveATargetNow = true;
            SetLightsTrackingRedShirt(true);

            double targetPanRelativeToRobot = target.Pan; // already adjusted for currentPanKinect

            //Tracer.Trace("+++++++++++++++ currentPanKinect=" + _state.currentPanKinect + " target.Pan=" + target.Pan + " targetPanRelativeToRobot=" + targetPanRelativeToRobot);
            //Tracer.Trace(" target.Pan=" + target.Pan + " Tilt=" + target.Tilt);

            // guns rotate (pan) with Kinect, but tilt independently of Kinect. They are calibrated when Kinect tilt = 0
            targetPan = targetPanRelativeToRobot - _state.currentPanKinect;
            targetTilt = target.Tilt; // currentTiltKinect already accounted for by VideoSurveillance

            //Tracer.Trace("+++++++++++++++ currentTiltKinect=" + _state.currentTiltKinect + " target.Tilt=" + target.Tilt + " targetTilt=" + targetTilt);

            //if((DateTime.Now - lastTurnedKinectPlatform).TotalSeconds > 1.0d)
            {
                lastTurnedKinectPlatform = DateTime.Now;

                double kinectTurnEstimate = targetPan; // targetPanRelativeToRobot - _state.currentPanKinect;
                bool shouldTurnKinect = Math.Abs(kinectTurnEstimate) > smallMovementsAngleTreshold; // don't follow small movements

                SetDesiredKinectPlatformPan(shouldTurnKinect ? (double?)targetPanRelativeToRobot : null); // will be processed in computeAndExecuteKinectPlatformTurn() when head turn measurement comes.
            }

            //Tracer.Trace(string.Format(" targetPan={0:0.00} Tilt={1:0.00} PanKinect={2:0.00}", targetPan, targetTilt, _state.currentPanKinect));

            setPanTilt(targetPan, targetTilt);

            double bestKinectTilt = targetTilt; // will be limited to +-27 degrees
            SetDesiredKinectTilt(bestKinectTilt);

            // choose robotTacticsType - current tactics is move towards human:
            var mostRecentParkingSensor = _state.MostRecentParkingSensor;

            // no direct range to the color target; distance appears to be estimated from the
            // front parking sensors (min of left-front and right-front) - TODO confirm:
            double redShirtDistanceMetersEstimated = mostRecentParkingSensor == null ? TargetDistanceToGoalMeters : Math.Min(mostRecentParkingSensor.parkingSensorMetersLF, mostRecentParkingSensor.parkingSensorMetersRF);

            //Tracer.Trace("redShirtDistanceEstimated = " + redShirtDistanceMetersEstimated);

            ComputeMovingVelocity(redShirtDistanceMetersEstimated, targetPanRelativeToRobot, 0.35d, 10.0d);

            // stationary, near goal distance and almost straight ahead - fire at most every 5 seconds:
            if (_mapperVicinity.robotState.robotTacticsType == RobotTacticsType.None
                && Math.Abs(redShirtDistanceMetersEstimated - TargetDistanceToGoalMeters) < 0.35d
                && Math.Abs(targetPan) < 10.0d
                && (DateTime.Now - lastGunsFiredOnRed).TotalSeconds > 5.0d)
            {
                lastGunsFiredOnRed = Now;
                //talkerToHuman.Say(9, "red shirt");
                SpawnIterator(ShootGunOnce);
            }

            if (!hadATarget || lostSkeletons) // just acquired target, or lost all Skeletons
            {
                frameProcessor.doSaveOneImage = _mapperVicinity.robotState.doPhotos; // snap a picture
                //talkerToHuman.Say(9, "red shirt");
                //nextAnnouncementDelay = _soundsHelper.Announce("$lady in red", nextAnnouncementDelayDefault, 0.05d);
                //nextAnnouncementDelay = _soundsHelper.Announce("red shirt", nextAnnouncementDelayDefault, 0.05d);
                talkerToHuman.rewindDialogue();
            }
        }
        else
        {
            if (target == null)
            {
                Tracer.Trace("----------------- no main color target");
            }
            else
            {
                Tracer.Trace("----------------- main color target too old at " + (Now - target.TimeStamp).TotalSeconds + " sec");
            }
        }

        #endregion // Have a red shirt, follow it
    } // end ignoreRedShirt
    else if ((Now - lastHadRedShirt).TotalSeconds < 1.0d)
    {
        _mapperVicinity.robotDirection.bearing = null; // indication for tactics to compute collisions and stop.
        return; // may be just temporary loss of red shirt, wait a little before switching to sound
    }
    else if (!haveSkeleton && !_mapperVicinity.robotState.ignoreKinectSounds)
    {
        // we let voice recognizer have control for several seconds, if we can't track skeleton or red shirt anyway.
        if ((Now - lastVoiceLocalized).TotalSeconds > 5.0d)
        {
            // choose robotTacticsType - current tactics is Stop:
            _mapperVicinity.robotState.robotTacticsType = RobotTacticsType.None;
        }
    }

    if (!haveATargetNow)
    {
        // no target means stopping
        PerformAvoidCollision(null, 1.0d); // just in case
        setCurrentGoalDistance(null);
        _mapperVicinity.robotDirection.bearing = null; // indication for tactics to compute collisions and stop.
        _mapperVicinity.robotState.robotTacticsType = RobotTacticsType.None;
        StopMoving();
        _state.MovingState = MovingState.Unable;
        _state.Countdown = 0; // 0 = immediate response
    }

    if (hadATarget && !haveATargetNow)
    {
        // the target was just lost on this cycle - reset lost-target bookkeeping and say goodbye:
        lastLostTargets = Now;
        secondsSinceLostTargetLast = -1;
        haveATargetNowState = 0;

        if ((Now - lastThanksForStoppingBy).TotalSeconds > 60.0d)
        {
            lastThanksForStoppingBy = Now;
            talkerToHuman.Say(9, "thanks for stopping by!");
        }
        //talkerToHuman.Say(9, "lost all humans");
        //string messageToSay = "$lost all humans";
        //nextAnnouncementDelay = _soundsHelper.Announce(messageToSay, nextAnnouncementDelayDefault, 0.1d);

        talkerToHuman.rewindDialogue();
        lastTargetPanSwitch = 0;
        StartHeadAnimationCombo(HeadComboAnimations.Restpose, false);
        AddHeadAnimationCombo(HeadComboAnimations.BlinkCycle, true, 0.4d);
    }

    hadATarget = haveATargetNow; // set flag for the next cycle

    #region Target Lost Routine

    if (!haveATargetNow)
    {
        if (_mapperVicinity.robotState.doLostTargetRoutine)
        {
            // after losing targets, rotate both directions for a while, and then stop and wait:
            int secondsSinceLostTarget = (int)Math.Round((Now - lastLostTargets).TotalSeconds);

            if (secondsSinceLostTarget != secondsSinceLostTargetLast)
            {
                // we come here once every second when the target is not in view.
                secondsSinceLostTargetLast = secondsSinceLostTarget;

                if (secondsSinceLostTarget <= 30)
                {
                    HeadlightsOn();

                    double tmpPanKinect = 0.0d;

                    // timed sweep: pan toward the target's last side, recenter, pan the other way, recenter:
                    switch (secondsSinceLostTarget)
                    {
                        case 0:
                        case 1:
                            // stop for now:
                            setCurrentGoalDistance(null);
                            _mapperVicinity.robotState.robotTacticsType = RobotTacticsType.None;
                            SetDesiredKinectTilt(3.0d);
                            return;

                        case 2:
                        case 3:
                        case 4:
                            if (haveATargetNowState != 1)
                            {
                                tmpPanKinect = 50.0d * Math.Sign(targetPan);
                                Tracer.Trace("setPanTilt() 1 Kinect pan=" + tmpPanKinect);
                                SetDesiredKinectPlatformPan(tmpPanKinect);
                                setGunsParked();
                                haveATargetNowState = 1;
                                talkerToHuman.Say(9, "One");
                            }
                            break;

                        case 5:
                        case 6:
                        case 7:
                            if (haveATargetNowState != 2)
                            {
                                Tracer.Trace("setPanKinect() 2 Kinect pan=0");
                                SetDesiredKinectPlatformPan(0.0d);
                                haveATargetNowState = 2;
                                talkerToHuman.Say(9, "Two");
                            }
                            break;

                        case 8:
                        case 9:
                        case 10:
                            if (haveATargetNowState != 3)
                            {
                                tmpPanKinect = -50.0d * Math.Sign(targetPan);
                                Tracer.Trace("setPanKinect() 3 Kinect pan=" + tmpPanKinect);
                                SetDesiredKinectPlatformPan(tmpPanKinect);
                                haveATargetNowState = 3;
                                talkerToHuman.Say(9, "Three");
                            }
                            break;

                        case 11:
                        case 12:
                            if (haveATargetNowState != 4)
                            {
                                Tracer.Trace("setPanKinect() 4 Kinect pan=0");
                                SetDesiredKinectPlatformPan(0.0d);
                                haveATargetNowState = 4;
                                talkerToHuman.Say(9, "Four");
                            }
                            break;
                    }

                    if (secondsSinceLostTarget > 12 && secondsSinceLostTarget % 6 == 0 && lastTargetPanSwitch != secondsSinceLostTarget)
                    {
                        lastTargetPanSwitch = secondsSinceLostTarget;
                        targetPan = -targetPan; // switch rotation direction every 6 seconds
                        Tracer.Trace("setPanKinect() 5 Kinect pan=0");
                        talkerToHuman.Say(9, "Switch");
                        SetDesiredKinectPlatformPan(0.0d);
                    }

                    setCurrentGoalBearingRelativeToRobot(60.0d * Math.Sign(targetPan)); // keep in the same direction where the target last was, aiming at 60 degrees for a steep turn in place

                    // choose robotTacticsType - rotate towards where the target was last seen:
                    setCurrentGoalDistance(TargetDistanceToGoalMeters);
                    FollowDirectionMaxVelocityMmSec = MinimumForwardVelocityMmSec; // ;ModerateForwardVelocityMmSec
                    _mapperVicinity.robotState.robotTacticsType = RobotTacticsType.FollowDirection;
                }
                else
                {
                    // stop, sing a song and wait for a target to appear:
                    FollowDirectionMaxVelocityMmSec = 0.0d;
                    setCurrentGoalDistance(null);
                    _mapperVicinity.robotState.robotTacticsType = RobotTacticsType.None;
                    haveATargetNowState = 0;

                    int lonelyPlayTime = 180; // the song is 2:40 - give it 3 minutes to play

                    if (secondsSinceLostTarget % 20 == 0 && (lastWaitingForHumansAnnounced == 0 || lastWaitingForHumansAnnounced == secondsSinceLostTarget - lonelyPlayTime))
                    {
                        lastWaitingForHumansAnnounced = secondsSinceLostTarget;
                        //talkerToHuman.Say(9, "waiting for humans");
                        _soundsHelper.Announce("$lonely", 5.0d, 0.05d); // play "I-am-Mr-Lonely.mp3" really quietly
                        talkerToHuman.rewindDialogue();
                        lastWaitingForHumansAnnounced = 0;
                        HeadlightsOff();
                    }

                    Tracer.Trace("secondsSinceLostTarget=" + secondsSinceLostTarget);

                    // every 10 seconds step the platform through the search angles, wrapping around:
                    if (secondsSinceLostTarget % 10 == 0)
                    {
                        Tracer.Trace("setPanKinect() 5 Kinect pan=" + panKinectSearchAngles[panKinectSearchIndex]);
                        SetDesiredKinectTilt(3.0d);
                        SetDesiredKinectPlatformPan(panKinectSearchAngles[panKinectSearchIndex++]);
                        if (panKinectSearchIndex >= panKinectSearchAngles.Length)
                        {
                            panKinectSearchIndex = 0;
                        }
                    }
                }
            }
        }
        else // !doLostTargetRoutine
        {
            // just assume safe position and wait till a new target appears in front of the camera:
            HeadlightsOff();

            if ((DateTime.Now - lastLostTargets).TotalSeconds > 3.0d)
            {
                SafePosture();
            }

            // stop for now:
            setCurrentGoalDistance(null);
            _mapperVicinity.robotDirection.bearing = null;
            _mapperVicinity.robotState.robotTacticsType = RobotTacticsType.None;
        }
    }

    #endregion // Target Lost Routine
}