public IHttpActionResult GetAllCandidates()
        {
            MatchCandidates result = new MatchCandidates();

            // Materialize the candidate list once, then record its size.
            result.Matches         = candidatesServices.GetAllCandidates().ToList();
            result.NumberOfMatches = result.Matches.Count;

            return Ok(result);
        }
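The action above shows only the controller method itself. Below is a minimal sketch of the pieces it assumes; CandidateViewModel and ICandidatesServices are placeholder names invented for illustration, since only Matches and NumberOfMatches appear in the original code.

using System.Collections.Generic;
using System.Web.Http;

// Placeholder element and service types, for illustration only.
public class CandidateViewModel { }
public interface ICandidatesServices
{
    IEnumerable<CandidateViewModel> GetAllCandidates();
}

// Response DTO implied by the action above (and deserialized in Example #2).
public class MatchCandidates
{
    public List<CandidateViewModel> Matches { get; set; }
    public int NumberOfMatches { get; set; }
}

// Hypothetical host controller: GetAllCandidates() would be an action on a
// class like this, with the service supplied through the constructor.
public class CandidatesController : ApiController
{
    private readonly ICandidatesServices candidatesServices;

    public CandidatesController(ICandidatesServices candidatesServices)
    {
        this.candidatesServices = candidatesServices;
    }
}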
Example #2
 public async Task<PartialViewResult> GetCandidates()
 {
     using (var client = new HttpClient())
     {
         client.BaseAddress = new Uri("http://localhost:2690");
         client.DefaultRequestHeaders.Accept.Clear();
         client.DefaultRequestHeaders.Accept.Add(new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/json"));

         // Await the call rather than blocking on .Result, which risks
         // deadlocking under ASP.NET's synchronization context.
         var response = await client.GetAsync("api/Candidates");
         if (response.IsSuccessStatusCode)
         {
             string                   json  = await response.Content.ReadAsStringAsync();
             MatchCandidates          model = JsonConvert.DeserializeObject<MatchCandidates>(json);
             ListCandidatesViewModels vm    = new ListCandidatesViewModels()
             {
                 Candidates = model.Matches
             };
             return PartialView("_ListCandidates", vm);
         }

         // The API call failed; render the partial view with no model.
         return PartialView("_ListCandidates", null);
     }
 }
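Note that this action builds and disposes a fresh HttpClient on every call. Under load that pattern can exhaust sockets, because each disposed client leaves its connections in TIME_WAIT. A common alternative is a single shared client; the sketch below assumes a wrapper class named CandidatesApi, which is not part of the original code.

using System;
using System.Net.Http;

// One HttpClient for the application's lifetime; HttpClient is safe for
// concurrent use. The class and field names here are assumptions.
public static class CandidatesApi
{
    public static readonly HttpClient Client = new HttpClient
    {
        BaseAddress = new Uri("http://localhost:2690")
    };
}

GetCandidates could then call await CandidatesApi.Client.GetAsync("api/Candidates") directly, without the using block.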
Example #3
        /// <summary>
        /// Perform camera pose finding when tracking is lost using AlignPointClouds.
        /// This is typically more successful than FindCameraPoseAlignDepthFloatToReconstruction.
        /// </summary>
        /// <returns>Returns true if a valid camera pose was found, otherwise false.</returns>
        public bool FindCameraPoseAlignPointClouds(byte[] colorPixels, FusionFloatImageFrame depthFloatFrame)
        {
            if (finder == null)
            {
                return false;
            }

            var resampledColorFrame = ProcessColorForCameraPoseFinder(colorPixels);

            if (resampledColorFrame.Width == 0)
            {
                // Color resampling failed, so pose finding cannot run.
                return false;
            }

            MatchCandidates matchCandidates = finder.FindCameraPose(
                depthFloatFrame,
                resampledColorFrame);

            if (matchCandidates == null)
            {
                return false;
            }

            int   poseCount   = matchCandidates.GetPoseCount();
            float minDistance = matchCandidates.CalculateMinimumDistance();

            if (poseCount == 0 || minDistance >= CameraPoseFinderDistanceThresholdReject)
            {
                logger.Log(LogLevel.Warn, "Not enough matches, or closest match too dissimilar");
                return false;
            }

            // Smooth the depth frame
            var smooth = volume.Engine.DepthProcessor.SmoothDepthFloatFrame(depthFloatFrame);

            // Calculate the point cloud from the smoothed frame
            engine.PointCloudCalculator.UpdateDepthPointCloud(smooth);

            double smallestEnergy = double.MaxValue;
            int    smallestEnergyNeighborIndex = -1;

            int     bestNeighborIndex      = -1;
            Matrix4 bestNeighborCameraPose = Matrix4.Identity;

            double bestNeighborAlignmentEnergy = Constants.MaxAlignPointCloudsEnergyForSuccess;

            // Run alignment with the best matched poses (i.e. k nearest neighbors, kNN)
            int maxTests = System.Math.Min(MaxCameraPoseFinderPoseTests, poseCount);

            var neighbors = matchCandidates.GetMatchPoses();

            for (int n = 0; n < maxTests; n++)
            {
                // Run the camera tracking algorithm against the volume:
                // raycast the volume at the stored pose and match the result
                // against the input point cloud to find a valid camera pose.
                Matrix4 poseProposal = neighbors[n];

                // Get the saved pose view by raycasting the volume
                var pc = engine.PointCloudCalculator;
                this.volume.Reconstruction.CalculatePointCloud(pc.RaycastPointCloudFrame, poseProposal);

                bool success = this.volume.Reconstruction.AlignPointClouds(
                    pc.RaycastPointCloudFrame,
                    pc.DepthPointCloudFrame,
                    FusionDepthProcessor.DefaultAlignIterationCount,
                    resampledColorFrame,
                    out this.alignmentEnergy,
                    ref poseProposal);

                bool relocSuccess = success &&
                                    this.alignmentEnergy < bestNeighborAlignmentEnergy &&
                                    this.alignmentEnergy > Constants.MinAlignPointCloudsEnergyForSuccess;

                if (relocSuccess)
                {
                    bestNeighborAlignmentEnergy = this.alignmentEnergy;
                    bestNeighborIndex           = n;

                    // This is after tracking succeeds, so should be a more accurate pose to store...
                    bestNeighborCameraPose = poseProposal;

                    // Update the delta image
                    resampledColorFrame.CopyPixelDataTo(engine.DeltaCalculator.DeltaFromReferenceFramePixelsArgb);
                }

                // Find smallest energy neighbor independent of tracking success
                if (this.alignmentEnergy < smallestEnergy)
                {
                    smallestEnergy = this.alignmentEnergy;
                    smallestEnergyNeighborIndex = n;
                }
            }

            matchCandidates.Dispose();

            // Use the neighbor with the smallest residual alignment energy
            // At the cost of additional processing we could also use kNN+Mean camera pose finding here
            // by calculating the mean pose of the best n matched poses and also testing this to see if the
            // residual alignment energy is less than with kNN.
            if (bestNeighborIndex > -1)
            {
                this.WorldToCameraTransform = bestNeighborCameraPose;
                this.SetReferenceFrame(this.WorldToCameraTransform);

                // Tracking succeeded!
                engine.CameraTracker.SetTrackingSucceeded();

                engine.DeltaCalculator.UpdateAlignDeltas();

                logger.Log(
                    LogLevel.Info,
                    string.Format(
                        CultureInfo.InvariantCulture,
                        "Camera Pose Finder SUCCESS! Residual energy={0:0.00000}, {1} frames stored, minimum distance={2}, best match index={3}",
                        bestNeighborAlignmentEnergy,
                        poseCount,
                        minDistance,
                        bestNeighborIndex));

                return true;
            }
            else
            {
                this.WorldToCameraTransform = neighbors[smallestEnergyNeighborIndex];
                this.SetReferenceFrame(this.WorldToCameraTransform);

                // Camera pose finding failed - return the tracking failed error code
                engine.CameraTracker.SetTrackingFailed();

                // Tracking Failed will be set again on the next iteration in ProcessDepth
                logger.Log(
                    LogLevel.Warn,
                    string.Format(
                        CultureInfo.InvariantCulture,
                        "Camera Pose Finder FAILED! Residual energy={0:0.00000}, {1} frames stored, minimum distance={2}, best match index={3}",
                        smallestEnergy,
                        poseCount,
                        minDistance,
                        smallestEnergyNeighborIndex));
                return false;
            }
        }
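For context, here is a hedged sketch of how a host pipeline might invoke this method once frame-to-model tracking is lost. The trackingFailed flag, colorPixels buffer, and depthFloatFrame are stand-ins for state owned by the caller and are assumptions, not part of the original code.

// Inside the per-frame processing loop, after the primary alignment
// (e.g. AlignDepthFloatToReconstruction) reports a tracking failure:
if (trackingFailed)
{
    // Attempt relocalization against the stored keyframe poses.
    if (this.FindCameraPoseAlignPointClouds(colorPixels, depthFloatFrame))
    {
        // Pose recovered; integration can resume on the next frame.
    }
    else
    {
        // Still lost; skip integrating this frame and retry on the next.
    }
}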