Code Example #1
        private void StrategyPersonFollowing()
        {
            haveATargetNow = false;
            bool     haveSkeleton  = false;
            bool     lostSkeletons = false;
            DateTime Now           = DateTime.Now;

            //kinect.JointType targetJointType = kinect.JointType.HandLeft;
            kinect.JointType targetJointType = kinect.JointType.Spine;

            setCurrentGoalDistance(null);                                           // measured value; at best the distance to the skeleton, null if we completely lost the target, or assumed to be 5 meters for a red shirt.
            FollowDirectionTargetDistanceToGoalMeters = TargetDistanceToGoalMeters; // desired value: we want to stop at this distance from the human and keep him in front of the robot.

            SetLightsTrackingSkeleton(false);
            SetLightsTrackingRedShirt(false);

            if (!_mapperVicinity.robotState.ignoreKinectSkeletons)
            {
                var tmpAllSkeletons = frameProcessor.AllSkeletons;  // take a snapshot of the reference to the allocated array, then process it at our leisure, knowing it will not change

                var skels = from s in tmpAllSkeletons
                            where s.IsSkeletonActive && s.JointPoints[targetJointType].TrackingState == kinect.JointTrackingState.Tracked
                            orderby s.JointPoints[targetJointType].Z
                            select s;

                int skelsCount = skels.Count();

                if (skelsCount != skelsCountPrev)
                {
                    int deltaSkelsCount = skelsCount - skelsCountPrev;
                    skelsCountPrev = skelsCount;

                    //if (deltaSkelsCount < 0)
                    //{
                    //    if ((Now - lastAmazing).TotalSeconds > 10.0d)
                    //    {
                    //        lastAmazing = Now;
                    //        _soundsHelper.PlaySound("you were amazing", 0.5d);
                    //    }
                    //}
                    //else
                    //{
                    //    _soundsHelper.PlaySound("skeletons number changed", 0.2d);
                    //}
                    //talkerToHuman.ensureAnnouncementDelay();

                    if (skelsCount > 0)
                    {
                        frameProcessor.doSaveOneImage = _mapperVicinity.robotState.doPhotos;  // snap a picture

                        //_mainWindow.PlayRandomSound();
                        //talkerToHuman.Say(9, "" + skelsCount + " tasty human" + (skelsCount > 1 ? "s" : ""));
                        HeadlightsOff();
                    }
                    else
                    {
                        lostSkeletons = true;
                    }
                }

                if (skelsCount > 0)
                {
                    haveSkeleton = true;

                    #region Have a skeleton, follow it

                    lastHadSkeletons = Now;

                    // found the first skeleton; track it:
                    VisualizableSkeletonInformation vsi = skels.FirstOrDefault();

                    if (vsi == null)
                    {
                        // this really should not happen, especially now that we allocate frameProcessor.AllSkeletons for every frame.
                        Tracer.Error("StrategyPersonFollowing() vsi == null");
                        return;
                    }

                    VisualizableJoint targetJoint = vsi.JointPoints[targetJointType];
                    //bool isSkeletonActive = vsi.IsSkeletonActive;     always true
                    SkeletonPose skeletonPose = vsi.SkeletonPose;

                    // when changed, announce pose and react to it:
                    ReactOnSkeletonPose(skeletonPose);

                    // Warning: VisualizableJoint::ComputePanTilt() can set Pan or Tilt to NaN
                    if (targetJoint != null && !double.IsNaN(targetJoint.Pan) && !double.IsNaN(targetJoint.Tilt))
                    {
                        haveATargetNow = true;

                        SetLightsTrackingSkeleton(true);

                        double targetPanRelativeToRobot = _state.currentPanKinect + targetJoint.Pan;
                        double targetPanRelativeToHead  = targetJoint.Pan;

                        //Tracer.Trace("==================  currentPanKinect=" + _state.currentPanKinect + "   targetJoint.Pan=" + targetJoint.Pan + "   targetPanRelativeToRobot=" + targetPanRelativeToRobot);

                        // guns rotate (pan) with the Kinect but tilt independently of it; they are calibrated for Kinect tilt = 0
                        targetPan  = targetPanRelativeToHead;
                        targetTilt = targetJoint.Tilt + _state.currentTiltKinect;

                        double kinectTurnEstimate = targetPanRelativeToRobot - _state.currentPanKinect;
                        bool   shouldTurnKinect   = Math.Abs(kinectTurnEstimate) > smallMovementsAngleTreshold;   // don't follow small movements

                        SetDesiredKinectPlatformPan(shouldTurnKinect ? (double?)targetPanRelativeToRobot : null); // will be processed in computeAndExecuteKinectPlatformTurn() when head turn measurement comes.

                        setPanTilt(targetPan, targetTilt);

                        double distanceToHumanMeters = targetJoint.Z;                                      // actual distance from Kinect to human

                        bool tooCloseToHuman  = distanceToHumanMeters < TargetDistanceToGoalMeters - 0.1d; // cannot shoot, likely backing up
                        bool veryCloseToHuman = distanceToHumanMeters < TargetDistanceToGoalMeters + 0.1d; // can talk to human, likely in the dead zone and not moving
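                        // note: together these flags bracket a ~0.2 m dead zone around
                        // TargetDistanceToGoalMeters, so the robot neither advances nor backs up
                        // while the human stays near the goal distance, avoiding oscillation.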

                        #region Greet the Human

                        if (veryCloseToHuman && talkerToHuman.canTalk())
                        {
                            frameProcessor.doSaveOneImage = _mapperVicinity.robotState.doPhotos;  // snap a picture
                            talkerToHuman.TalkToHuman();
                        }

                        #endregion // Greet the Human

                        #region Shoot the Human

                        if (skeletonPose == shootingPose)
                        {
                            if (!tooCloseToHuman)
                            {
                                //lock (shootingPoseLock)
                                //{
                                if (!shotAtHuman && (Now - lastShotAtHuman).TotalSeconds > 2.0d)
                                {
                                    lastShotAtHuman = Now;
                                    shotAtHuman     = true;
                                    talkerToHuman.Say(9, "good boy");
                                    SpawnIterator(ShootGunOnce);
                                }
                                //}
                            }
                        }
                        else
                        {
                            shotAtHuman = false;
                        }

                        #endregion // Shoot the Human

                        ComputeMovingVelocity(distanceToHumanMeters, targetPanRelativeToRobot, 0.25d, 10.0d);
                    }
                    // else
                    // {
                    //      // we have skeleton(s) but the target joint is not visible. What to do here?
                    // }
                    #endregion // Have a skeleton, follow it
                }
                else if ((Now - lastHadSkeletons).TotalSeconds < 1.0d)
                {
                    return; // may be just temporary loss of skeletons, wait a little before switching to red shirt
                }
            }               // end ignoreKinectSkeletons

            if (!_mapperVicinity.robotState.ignoreRedShirt && !haveSkeleton && frameProcessor.videoSurveillanceDecider != null)
            {
                #region Have a red shirt, follow it

                VideoSurveillanceTarget target = frameProcessor.videoSurveillanceDecider.mainColorTarget;

                if (target != null && (Now - target.TimeStamp).TotalSeconds < 0.5d)     // must also be recent
                {
                    lastHadRedShirt = Now;

                    haveATargetNow = true;

                    SetLightsTrackingRedShirt(true);

                    double targetPanRelativeToRobot = target.Pan;   // already adjusted for currentPanKinect

                    //Tracer.Trace("+++++++++++++++  currentPanKinect=" + _state.currentPanKinect + "   target.Pan=" + target.Pan + "   targetPanRelativeToRobot=" + targetPanRelativeToRobot);
                    //Tracer.Trace("   target.Pan=" + target.Pan + "   Tilt=" + target.Tilt);

                    // guns rotate (pan) with the Kinect but tilt independently of it; they are calibrated for Kinect tilt = 0
                    targetPan  = targetPanRelativeToRobot - _state.currentPanKinect;
                    targetTilt = target.Tilt; // currentTiltKinect already accounted for by VideoSurveillance
                    //Tracer.Trace("+++++++++++++++  currentTiltKinect=" + _state.currentTiltKinect + "   target.Tilt=" + target.Tilt + "   targetTilt=" + targetTilt);

                    //if((DateTime.Now - lastTurnedKinectPlatform).TotalSeconds > 1.0d)
                    {
                        lastTurnedKinectPlatform = DateTime.Now;
                        double kinectTurnEstimate = targetPan;                                                    // targetPanRelativeToRobot - _state.currentPanKinect;
                        bool   shouldTurnKinect   = Math.Abs(kinectTurnEstimate) > smallMovementsAngleTreshold;   // don't follow small movements

                        SetDesiredKinectPlatformPan(shouldTurnKinect ? (double?)targetPanRelativeToRobot : null); // will be processed in computeAndExecuteKinectPlatformTurn() when head turn measurement comes.
                    }

                    //Tracer.Trace(string.Format("   targetPan={0:0.00}        Tilt={1:0.00}        PanKinect={2:0.00}", targetPan, targetTilt, _state.currentPanKinect));

                    setPanTilt(targetPan, targetTilt);

                    double bestKinectTilt = targetTilt;   // will be limited to +-27 degrees

                    SetDesiredKinectTilt(bestKinectTilt);

                    // choose robotTacticsType - current tactics is move towards human:

                    var mostRecentParkingSensor = _state.MostRecentParkingSensor;

                    double redShirtDistanceMetersEstimated = mostRecentParkingSensor == null ? TargetDistanceToGoalMeters : Math.Min(mostRecentParkingSensor.parkingSensorMetersLF, mostRecentParkingSensor.parkingSensorMetersRF);

                    //Tracer.Trace("redShirtDistanceEstimated = " + redShirtDistanceMetersEstimated);

                    ComputeMovingVelocity(redShirtDistanceMetersEstimated, targetPanRelativeToRobot, 0.35d, 10.0d);

                    if (_mapperVicinity.robotState.robotTacticsType == RobotTacticsType.None &&
                        Math.Abs(redShirtDistanceMetersEstimated - TargetDistanceToGoalMeters) < 0.35d &&
                        Math.Abs(targetPan) < 10.0d &&
                        (DateTime.Now - lastGunsFiredOnRed).TotalSeconds > 5.0d)
                    {
                        lastGunsFiredOnRed = Now;
                        //talkerToHuman.Say(9, "red shirt");
                        SpawnIterator(ShootGunOnce);
                    }

                    if (!hadATarget || lostSkeletons)                                        // just acquired target, or lost all Skeletons
                    {
                        frameProcessor.doSaveOneImage = _mapperVicinity.robotState.doPhotos; // snap a picture

                        //talkerToHuman.Say(9, "red shirt");
                        //nextAnnouncementDelay = _soundsHelper.Announce("$lady in red", nextAnnouncementDelayDefault, 0.05d);
                        //nextAnnouncementDelay = _soundsHelper.Announce("red shirt", nextAnnouncementDelayDefault, 0.05d);

                        talkerToHuman.rewindDialogue();
                    }
                }
                else
                {
                    if (target == null)
                    {
                        Tracer.Trace("-----------------  no main color target");
                    }
                    else
                    {
                        Tracer.Trace("-----------------  main color target too old at " + (Now - target.TimeStamp).TotalSeconds + " sec");
                    }
                }

                #endregion // Have a red shirt, follow it
            } // end ignoreRedShirt
            else if ((Now - lastHadRedShirt).TotalSeconds < 1.0d)
            {
                _mapperVicinity.robotDirection.bearing = null; // indication for tactics to compute collisions and stop.

                return;                                        // may be just temporary loss of red shirt, wait a little before switching to sound
            }
            else if (!haveSkeleton && !_mapperVicinity.robotState.ignoreKinectSounds)
            {
                // let the voice recognizer have control for several seconds, since we cannot track a skeleton or red shirt anyway.
                if ((Now - lastVoiceLocalized).TotalSeconds > 5.0d)
                {
                    // choose robotTacticsType - current tactics is Stop:
                    _mapperVicinity.robotState.robotTacticsType = RobotTacticsType.None;
                }
            }

            if (!haveATargetNow)
            {
                // no target means stopping
                PerformAvoidCollision(null, 1.0d);                  // just in case
                setCurrentGoalDistance(null);
                _mapperVicinity.robotDirection.bearing      = null; // indication for tactics to compute collisions and stop.
                _mapperVicinity.robotState.robotTacticsType = RobotTacticsType.None;
                StopMoving();
                _state.MovingState = MovingState.Unable;
                _state.Countdown   = 0; // 0 = immediate response
            }

            if (hadATarget && !haveATargetNow)
            {
                lastLostTargets            = Now;
                secondsSinceLostTargetLast = -1;
                haveATargetNowState        = 0;

                if ((Now - lastThanksForStoppingBy).TotalSeconds > 60.0d)
                {
                    lastThanksForStoppingBy = Now;
                    talkerToHuman.Say(9, "thanks for stopping by!");
                }
                //talkerToHuman.Say(9, "lost all humans");
                //string messageToSay = "$lost all humans";
                //nextAnnouncementDelay = _soundsHelper.Announce(messageToSay, nextAnnouncementDelayDefault, 0.1d);

                talkerToHuman.rewindDialogue();

                lastTargetPanSwitch = 0;
                StartHeadAnimationCombo(HeadComboAnimations.Restpose, false);
                AddHeadAnimationCombo(HeadComboAnimations.BlinkCycle, true, 0.4d);
            }

            hadATarget = haveATargetNow;    // set flag for the next cycle

            #region Target Lost Routine

            if (!haveATargetNow)
            {
                if (_mapperVicinity.robotState.doLostTargetRoutine)
                {
                    // after losing targets, rotate both directions for a while, and then stop and wait:
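                    // schedule: 0-1 s stop; 2-4 s pan the head 50 degrees toward the last target pan;
                    // 5-7 s recenter; 8-10 s pan 50 degrees the other way; 11-12 s recenter; then
                    // rotate the body toward the last known direction, flipping direction every 6 s;
                    // after 30 s stop, play a song, and sweep the head every 10 s.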

                    int secondsSinceLostTarget = (int)Math.Round((Now - lastLostTargets).TotalSeconds);

                    if (secondsSinceLostTarget != secondsSinceLostTargetLast)
                    {
                        // we come here once every second when the target is not in view.
                        secondsSinceLostTargetLast = secondsSinceLostTarget;

                        if (secondsSinceLostTarget <= 30)
                        {
                            HeadlightsOn();

                            double tmpPanKinect = 0.0d;

                            switch (secondsSinceLostTarget)
                            {
                            case 0:
                            case 1:
                                // stop for now:
                                setCurrentGoalDistance(null);
                                _mapperVicinity.robotState.robotTacticsType = RobotTacticsType.None;
                                SetDesiredKinectTilt(3.0d);
                                return;

                            case 2:
                            case 3:
                            case 4:
                                if (haveATargetNowState != 1)
                                {
                                    tmpPanKinect = 50.0d * Math.Sign(targetPan);
                                    Tracer.Trace("setPanTilt()  1  Kinect pan=" + tmpPanKinect);
                                    SetDesiredKinectPlatformPan(tmpPanKinect);
                                    setGunsParked();
                                    haveATargetNowState = 1;
                                    talkerToHuman.Say(9, "One");
                                }
                                break;

                            case 5:
                            case 6:
                            case 7:
                                if (haveATargetNowState != 2)
                                {
                                    Tracer.Trace("setPanKinect()  2  Kinect pan=0");
                                    SetDesiredKinectPlatformPan(0.0d);
                                    haveATargetNowState = 2;
                                    talkerToHuman.Say(9, "Two");
                                }
                                break;

                            case 8:
                            case 9:
                            case 10:
                                if (haveATargetNowState != 3)
                                {
                                    tmpPanKinect = -50.0d * Math.Sign(targetPan);
                                    Tracer.Trace("setPanKinect()  3  Kinect pan=" + tmpPanKinect);
                                    SetDesiredKinectPlatformPan(tmpPanKinect);
                                    haveATargetNowState = 3;
                                    talkerToHuman.Say(9, "Three");
                                }
                                break;

                            case 11:
                            case 12:
                                if (haveATargetNowState != 4)
                                {
                                    Tracer.Trace("setPanKinect()  4  Kinect pan=0");
                                    SetDesiredKinectPlatformPan(0.0d);
                                    haveATargetNowState = 4;
                                    talkerToHuman.Say(9, "Four");
                                }
                                break;
                            }

                            if (secondsSinceLostTarget > 12 && secondsSinceLostTarget % 6 == 0 && lastTargetPanSwitch != secondsSinceLostTarget)
                            {
                                lastTargetPanSwitch = secondsSinceLostTarget;
                                targetPan           = -targetPan; // switch rotation direction every 6 seconds

                                Tracer.Trace("setPanKinect()  5  Kinect pan=0");
                                talkerToHuman.Say(9, "Switch");
                                SetDesiredKinectPlatformPan(0.0d);
                            }

                            setCurrentGoalBearingRelativeToRobot(60.0d * Math.Sign(targetPan));   // keep in the same direction where the target last was, aiming at 60 degrees for a steep turn in place

                            // choose robotTacticsType - rotate towards where the target was last seen:
                            setCurrentGoalDistance(TargetDistanceToGoalMeters);
                            FollowDirectionMaxVelocityMmSec             = MinimumForwardVelocityMmSec; // alternative: ModerateForwardVelocityMmSec
                            _mapperVicinity.robotState.robotTacticsType = RobotTacticsType.FollowDirection;
                        }
                        else
                        {
                            // stop, sing a song and wait for a target to appear:
                            FollowDirectionMaxVelocityMmSec = 0.0d;
                            setCurrentGoalDistance(null);
                            _mapperVicinity.robotState.robotTacticsType = RobotTacticsType.None;
                            haveATargetNowState = 0;

                            int lonelyPlayTime = 180;     // the song is 2:40 - give it 3 minutes to play

                            if (secondsSinceLostTarget % 20 == 0 && (lastWaitingForHumansAnnounced == 0 || lastWaitingForHumansAnnounced == secondsSinceLostTarget - lonelyPlayTime))
                            {
                                lastWaitingForHumansAnnounced = secondsSinceLostTarget;
                                //talkerToHuman.Say(9, "waiting for humans");
                                _soundsHelper.Announce("$lonely", 5.0d, 0.05d);     // play "I-am-Mr-Lonely.mp3" really quietly

                                talkerToHuman.rewindDialogue();
                                lastWaitingForHumansAnnounced = 0;

                                HeadlightsOff();
                            }

                            Tracer.Trace("secondsSinceLostTarget=" + secondsSinceLostTarget);

                            if (secondsSinceLostTarget % 10 == 0)
                            {
                                Tracer.Trace("setPanKinect()  5  Kinect pan=" + panKinectSearchAngles[panKinectSearchIndex]);

                                SetDesiredKinectTilt(3.0d);
                                SetDesiredKinectPlatformPan(panKinectSearchAngles[panKinectSearchIndex++]);
                                if (panKinectSearchIndex >= panKinectSearchAngles.Length)
                                {
                                    panKinectSearchIndex = 0;
                                }
                            }
                        }
                    }
                }
                else    // !doLostTargetRoutine
                {
                    // just assume safe position and wait till a new target appears in front of the camera:
                    HeadlightsOff();
                    if ((DateTime.Now - lastLostTargets).TotalSeconds > 3.0d)
                    {
                        SafePosture();
                    }
                    // stop for now:
                    setCurrentGoalDistance(null);
                    _mapperVicinity.robotDirection.bearing      = null;
                    _mapperVicinity.robotState.robotTacticsType = RobotTacticsType.None;
                }
            }

            #endregion // Target Lost Routine
        }
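A detail worth pulling out of the method above: skeleton joints report pan relative to the Kinect head (so the robot-relative bearing is currentPanKinect + targetJoint.Pan), while red-shirt targets arrive already robot-relative (so the head-relative pan is target.Pan - _state.currentPanKinect). A minimal sketch of that bookkeeping under those conventions; the class and method names below are illustrative and do not exist in the original code:

        // Minimal sketch, assuming the angle conventions used in StrategyPersonFollowing():
        // robot-relative pan = head-relative pan + current Kinect platform pan.
        // The helper names are illustrative, not from the original source.
        public static class PanFrames
        {
            // skeleton joints: targetJoint.Pan arrives head-relative
            public static double HeadToRobot(double panRelativeToHead, double currentPanKinect)
            {
                return currentPanKinect + panRelativeToHead;
            }

            // red-shirt targets: target.Pan arrives already robot-relative
            public static double RobotToHead(double panRelativeToRobot, double currentPanKinect)
            {
                return panRelativeToRobot - currentPanKinect;
            }
        }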
Code Example #2
        public IEnumerator <ITask> ProcessImageFrame()
        {
            DateTime started = DateTime.Now;

            // default RGB image size is 640 x 480
            // setting another value (e.g. 320x240) in ...\TrackRoamer\TrackRoamerServices\Config\TrackRoamer.TrackRoamerBot.Kinect.Config.xml does not seem to work (it causes NUI initialization failure).

            byte[] srcImageBits          = this.RawFrames.RawColorFrameData;
            int    srcImageBytesPerPixel = this.RawFrames.RawColorFrameInfo.BytesPerPixel;
            int    srcImageWidth         = this.RawFrames.RawColorFrameInfo.Width;
            int    srcImageHeight        = this.RawFrames.RawColorFrameInfo.Height;

            //if (ProcessedImageWidth != this.RawFrames.RawImageFrameData.Image.Width || ProcessedImageHeight != this.RawFrames.RawImageFrameData.Image.Height)
            //{
            //    ProcessedImageWidth = this.RawFrames.RawImageFrameData.Image.Width;
            //    ProcessedImageHeight = this.RawFrames.RawImageFrameData.Image.Height;

            //    ImageBitsProcessed = new byte[ProcessedImageWidth * ProcessedImageHeight * 4];
            //}

            // we need to convert the Kinect/MRDS service image to an OpenCV image; that means converting first to a BitmapSource and then to a System.Drawing.Bitmap:
            BitmapSource srcBitmapSource = BitmapSource.Create(srcImageWidth, srcImageHeight, 96, 96, PixelFormats.Bgr32, null, srcImageBits, srcImageWidth * srcImageBytesPerPixel);

            if (doSaveOneImage)
            {
                doSaveOneImage = false;

                SaveBitmapSource(srcBitmapSource);
            }

            Image <Bgr, byte>  img      = new Image <Bgr, byte>(BitmapSourceToBitmap(srcBitmapSource));
            Image <Gray, byte> gimg     = null;
            Image <Bgr, byte>  filtered = null;

            img._SmoothGaussian(11); // filter out noise

            // from here we can operate OpenCV / Emgu Image, at the end converting Image to BitmapProcessed:

            if (videoSurveillanceDecider == null)
            {
                videoSurveillanceDecider = new VideoSurveillanceDecider(img.Width, img.Height);
            }

            videoSurveillanceDecider.Init();

            if (doColorRecognition)
            {
                // color detection (T-shirt, cone...):

                //lock (videoSurveillanceDecider)
                //{
                //    videoSurveillanceDecider.purgeColorBlobs();
                //}

                filtered = img.Clone().SmoothBlur(13, 13);       //.SmoothGaussian(9);

                byte[, ,] data = filtered.Data;
                int    nRows = filtered.Rows;
                int    nCols = filtered.Cols;
                double averageBrightnessTmp = 0.0d;

                colorTresholdMain = averageBrightness / 2.0d;
                double colorFactorMain = 256.0d * colorFactor / averageBrightness;

                // leave only pixels with a distinctly red color in "filtered":
                for (int i = nRows - 1; i >= 0; i--)
                {
                    for (int j = nCols - 1; j >= 0; j--)
                    {
                        // R component (2) must be greater than B (0) and G (1) by the colorFactor; dark areas are excluded:
                        double compR = data[i, j, 2];
                        double compG = data[i, j, 1];
                        double compB = data[i, j, 0];

                        double compSum = compR + compG + compB;         // brightness
                        averageBrightnessTmp += compSum;

                        if (compR > colorTresholdMain)                   //&& compG > colorTreshold && compB > colorTreshold)
                        {
                            compR = (compR / compSum) / colorFactorMain; // adjusted for brightness
                            compG = compG / compSum;
                            compB = compB / compSum;
                            if (compR > compG && compR > compB)
                            {
                                data[i, j, 0] = data[i, j, 1] = 0;    // B, G
                                data[i, j, 2] = 255;                  // R
                            }
                            else
                            {
                                data[i, j, 0] = data[i, j, 1] = data[i, j, 2] = 0;
                            }
                        }
                        else
                        {
                            // too dark.
                            data[i, j, 0] = data[i, j, 1] = data[i, j, 2] = 0;
                        }
                    }
                }

                averageBrightness = averageBrightnessTmp / (nRows * nCols * 3.0d); // save it for the next cycle
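                // note: averageBrightness saved here feeds colorTresholdMain and colorFactorMain
                // at the top of the next frame, so the red test adapts to overall scene
                // illumination with one frame of lag.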

                gimg = filtered.Split()[2];                                        // make a grey image out of the Red channel, supposedly containing all red objects.

                // contour detection:

                int areaTreshold = 300;     // do not consider red contours with area in pixels less than areaTreshold.

                Contour <System.Drawing.Point> contours;
                MemStorage store = new MemStorage();

                // make a linked list of contours from the red spots on the screen:
                contours = gimg.FindContours(Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_LIST, store);
                CvInvoke.cvZero(gimg.Ptr);

                if (contours != null)
                {
                    CvInvoke.cvDrawContours(img.Ptr, contours.Ptr, new MCvScalar(255, 0, 0), new MCvScalar(255, 255, 255), 2, 2, Emgu.CV.CvEnum.LINE_TYPE.CV_AA, System.Drawing.Point.Empty);

                    List <ContourContainer> contourContainers = new List <ContourContainer>();

                    for (; contours != null; contours = contours.HNext)
                    {
                        contours.ApproxPoly(contours.Perimeter * 0.02, 0, contours.Storage);
                        if (contours.Area > areaTreshold)
                        {
                            contourContainers.Add(new ContourContainer()
                            {
                                contour = contours
                            });

                            //int centerX = contours.BoundingRectangle.X + contours.BoundingRectangle.Width / 2;
                            //int centerY = contours.BoundingRectangle.Y + contours.BoundingRectangle.Height / 2;
                            //img.Draw(contours.BoundingRectangle, new Bgr(64.0, 64.0, 255.0), 2);
                            //img.Draw(Math.Round((double)(((int)contours.Area) / 100) * 100).ToString(), ref _font, new System.Drawing.Point(centerX, centerY), new Bgr(64.0, 64.0, 255.0));
                        }
                    }

                    // for the VideoSurveillanceDecider to work, we need to supply blob IDs - generate them as numbers in a size-ordered list, offset by 1000:
                    var ccs = from cc in contourContainers
                              orderby cc.contour.Area descending
                              select cc;

                    int ccId        = 0;
                    int goodCounter = 0;
                    lock (videoSurveillanceDecider)
                    {
                        videoSurveillanceDecider.purgeColorBlobs();

                        foreach (ContourContainer cc in ccs)
                        {
                            cc.ID = 1000 + ccId;      // offset so as not to overlap with VideoSurveillance-generated blob IDs
                            VideoSurveillanceTarget target = videoSurveillanceDecider.Update(cc, currentPanKinect, currentTiltKinect);
                            ccId++;
                            if (target != null && target.Rank > 1.0d)
                            {
                                goodCounter++;
                                if (goodCounter > 10000)  // safety cap on the number of good targets (10000 is effectively no limit)
                                {
                                    break;
                                }
                            }
                        }

                        if (!doSurveillance)
                        {
                            videoSurveillanceDecider.Commit();
                            videoSurveillanceDecider.ComputeMainColorTarget();
                            videoSurveillanceDecider.Draw(img);             // must run under lock
                        }
                    }
                }
            }

            if (doSurveillance)
            {
                // blob detection by Emgu.CV.VideoSurveillance:

                if (_tracker == null)
                {
                    _tracker  = new BlobTrackerAuto <Bgr>();
                    _detector = new FGDetector <Bgr>(FORGROUND_DETECTOR_TYPE.FGD);
                }

                Image <Bgr, byte> imgSmall = img.Resize(0.5d, INTER.CV_INTER_NN);      // on the full image _tracker.Process() fails, trying to allocate 91MB of memory

                #region use the BG/FG detector to find the foreground mask
                _detector.Update(imgSmall);
                Image <Gray, byte> forgroundMask = _detector.ForgroundMask;
                #endregion

                _tracker.Process(imgSmall, forgroundMask);

                lock (videoSurveillanceDecider)
                {
                    videoSurveillanceDecider.PurgeAndCommit();      // make sure that obsolete Surveillance targets are removed

                    foreach (MCvBlob blob in _tracker)
                    {
                        // keep in mind that we were working on the scaled-down (half-size) image, so all points should be multiplied by two.
                        VideoSurveillanceTarget target = videoSurveillanceDecider.Update(blob, currentPanKinect, currentTiltKinect);
                    }

                    videoSurveillanceDecider.ComputeMainColorTarget();
                    videoSurveillanceDecider.Draw(img);             // must run under lock
                }
            }

            Bgr color = new Bgr(0.0, 128.0, 128.0);

            // draw center vertical line:
            System.Drawing.Point[] pts = new System.Drawing.Point[2];
            pts[0] = new System.Drawing.Point(img.Width / 2, 0);
            pts[1] = new System.Drawing.Point(img.Width / 2, img.Height);

            img.DrawPolyline(pts, false, color, 1);

            // draw center horizontal line:
            pts[0] = new System.Drawing.Point(0, img.Height / 2);
            pts[1] = new System.Drawing.Point(img.Width, img.Height / 2);

            img.DrawPolyline(pts, false, color, 1);

            // draw a sighting frame for precise alignment:
            // Horizontally the frame spans 16.56 degrees on each side and 12.75 degrees up or down (at 74" it covers 44"W by 33.5"H, i.e. 33.12 degrees by 25.5 degrees)
            System.Drawing.Point[] pts1 = new System.Drawing.Point[5];
            pts1[0] = new System.Drawing.Point(img.Width / 4, img.Height / 4);
            pts1[1] = new System.Drawing.Point(img.Width * 3 / 4, img.Height / 4);
            pts1[2] = new System.Drawing.Point(img.Width * 3 / 4, img.Height * 3 / 4);
            pts1[3] = new System.Drawing.Point(img.Width / 4, img.Height * 3 / 4);
            pts1[4] = new System.Drawing.Point(img.Width / 4, img.Height / 4);

            img.DrawPolyline(pts1, false, color, 1);

            // end of OpenCV / Emgu Image processing, converting the Image to BitmapProcessed:

            BitmapProcessed = img.ToBitmap();     // image with all processing marked
            //BitmapProcessed = filtered.ToBitmap();  // red image out of the Red channel
            //BitmapProcessed = gimg.ToBitmap();      // grey image; is CvZero'ed by this point
            //BitmapProcessed = forgroundMask.ToBitmap();

            //Tracer.Trace("Video processed in " + (DateTime.Now - started).TotalMilliseconds + " ms");       // usually 40...70ms

            yield break;
        }
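The per-pixel test in the color loop above can be read as a standalone predicate: a pixel counts as "distinctly red" when its red channel clears a brightness-derived threshold and its brightness-normalized red share dominates the green and blue shares. A compact sketch of just that rule; the function name is illustrative, while colorTresholdMain and colorFactorMain keep the original spellings:

        // Minimal sketch of the "distinctly red" pixel test used in ProcessImageFrame();
        // the helper name is illustrative, not from the original source.
        static bool IsDistinctlyRed(double r, double g, double b, double colorTresholdMain, double colorFactorMain)
        {
            if (r <= colorTresholdMain)
            {
                return false;                            // too dark to classify
            }

            double sum = r + g + b;                      // pixel brightness
            double nr = (r / sum) / colorFactorMain;     // red share, adjusted for brightness
            double ng = g / sum;
            double nb = b / sum;
            return nr > ng && nr > nb;                   // red dominates both other channels
        }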
Code Example #3
        // IComparable implementation: orders targets by Rank, ascending.
        public int CompareTo(object obj)
        {
            VideoSurveillanceTarget other = obj as VideoSurveillanceTarget;

            if (other == null)
            {
                throw new ArgumentException("obj is not a VideoSurveillanceTarget", "obj");
            }

            return Rank.CompareTo(other.Rank);
        }