Ejemplo n.º 1
0
        private Vector2<int>[] samplePositions; // precomputed sample positions used when reseeding tracking points

        #endregion Fields

        #region Methods

        /// <summary>
        /// Precomputes a set of low-discrepancy sample positions over the image
        /// (radical inverse in bases 2 and 3, i.e. a 2d Halton-style sequence) and
        /// sets up the nearest-position search operator that uses them.
        /// </summary>
        /// <param name="computeContext">OpenCL context handed to the search operator</param>
        /// <param name="imageSize">dimensions of the image the samples cover</param>
        public void initialize(OpenCl.ComputeContext computeContext, Vector2<int> imageSize)
        {
            const float trackingDensity = 0.2f;
            const int searchRadius = 10;

            this.imageSize = imageSize;

            // sample count scales with the image area and the square of the density
            int sampleCount = (int)((float)imageSize.x * (float)imageSize.y * trackingDensity * trackingDensity);

            samplePositions = new Vector2<int>[sampleCount];

            for (int sampleIndex = 0; sampleIndex < sampleCount; sampleIndex++)
            {
                Vector2<int> position = new Vector2<int>();
                position.x = (int)(((float)imageSize.x - 1.0f) * Misc.RandomUtil.radicalInverse(sampleIndex, 2));
                position.y = (int)(((float)imageSize.y - 1.0f) * Misc.RandomUtil.radicalInverse(sampleIndex, 3));

                samplePositions[sampleIndex] = position;
            }

            operatorFindNearestPosition = new OpenCl.OperatorFindNearestPosition();
            operatorFindNearestPosition.initialize(computeContext, searchRadius, imageSize);

            operatorFindNearestPosition.inputPositions = samplePositions;
        }
Ejemplo n.º 2
0
        /// <summary>
        /// Runs one attention pass: blurs the motion map, downsamples the maps,
        /// then computes the per-map and master novelty values.
        /// </summary>
        /// <param name="metric">timing-metric collector handed down to the sub-steps</param>
        /// <param name="computeContext">OpenCL context used by the blur operator</param>
        public void calculate(ResourceMetric metric, OpenCl.ComputeContext computeContext)
        {
            // blur the motion map (result is stored for the later steps)
            bluredMotionMap = doBlurMotionMap(metric, computeContext);

            // downsample maps
            downsampleMaps(metric);

            // novelty of motion ("novelity" spelling comes from the called method names)
            calculateNovelity();
            calculateMasterNovelity();
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Applies the blur operator to the motion map and returns the blurred copy,
        /// timing the operation via the supplied metric.
        /// </summary>
        /// <param name="metric">timing-metric collector</param>
        /// <param name="computeContext">OpenCL context the blur operator runs on</param>
        /// <returns>a newly allocated map holding the blurred motion map</returns>
        private Map2d<float> doBlurMotionMap(ResourceMetric metric, OpenCl.ComputeContext computeContext)
        {
            metric.startTimer("visual attention", "blur motion map", "");

            // destination map with the same dimensions as the source motion map
            // (named "result" so it does not shadow the bluredMotionMap field)
            Map2d<float> result = new Map2d<float>(motionMap.getWidth(), motionMap.getLength());

            blurMotionMap.inputMap = motionMap;
            blurMotionMap.outputMap = result;
            blurMotionMap.calculate(computeContext);

            metric.stopTimer();

            return result;
        }
Ejemplo n.º 4
0
        /// <summary>
        /// Initializes the blur operator and allocates the novelty-related buffers.
        /// </summary>
        /// <param name="computeContext">OpenCL context handed to the blur operator</param>
        /// <param name="mapSize">dimensions of the motion map</param>
        public void initialize(OpenCl.ComputeContext computeContext, Vector2<int> mapSize)
        {
            // was a bare magic number; presumably the blur kernel size/radius —
            // TODO confirm against the blur operator's initialize signature
            const int blurSize = 80;

            blurMotionMap.initialize(computeContext, blurSize, mapSize);

            allocateNovelityDetectors(mapSize);
            allocateDownsampledMaps(mapSize);
            allocateMasterNovelity(mapSize);
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Reseeds the tracking points for the edges.
        ///
        /// Each chosen sample point searches for an edge point *which must be next to
        /// the sample point*; if no point is found, the sample is discarded (because
        /// it wouldn't wander around etc.).
        ///
        /// This functionality needs to be hardwired (not modifiable by a genetic
        /// algorithm etc.).
        /// </summary>
        /// <param name="metric">timing-metric collector</param>
        /// <param name="computeContext">OpenCL context the search operator runs on</param>
        /// <param name="edgesImage">boolean edge map; must match the initialized image size</param>
        public void reseedTrackingPoints(ResourceMetric metric, OpenCl.ComputeContext computeContext, Map2d<bool> edgesImage)
        {
            System.Diagnostics.Debug.Assert(edgesImage.getWidth() == imageSize.x);
            System.Diagnostics.Debug.Assert(edgesImage.getLength() == imageSize.y);

            // run the nearest-position search for all sample positions at once
            metric.startTimer("visual", "edge point reseed", "findNearestPosition");

            operatorFindNearestPosition.inputMap = edgesImage;
            operatorFindNearestPosition.calculate(computeContext);

            metric.stopTimer();

            metric.startTimer("visual", "edge point reseed", "");

            // create a tracked pixel for every sample whose search found an edge point
            for( int sampleIndex = 0; sampleIndex < samplePositions.Length; sampleIndex++ )
            {
                if( !operatorFindNearestPosition.foundNewPositions[sampleIndex] )
                {
                    continue;
                }

                Vector2<int> foundPosition = operatorFindNearestPosition.outputPositions[sampleIndex];

                TrackedPixel newTrackedPixel = new TrackedPixel(imageSize.x);
                newTrackedPixel.position = foundPosition.clone();
                newTrackedPixel.oldPosition = foundPosition.clone();

                trackedBorderPixels.Add(newTrackedPixel);
            }

            metric.stopTimer();
        }