Code example #1
        private double ComputeRMSE(Point3DCollection A, Point3DCollection B, pointmatcher.net.EuclideanTransform transform)
        {
            var sum = 0.0d;

            // Accumulate the squared distance between each point of A (after applying
            // the transform) and its corresponding point in B. Both collections are
            // expected to contain the same number of points.
            for (int i = 0; i < A.Count; i++)
            {
                var k        = transform.Apply(new System.Numerics.Vector3((float)A[i].X, (float)A[i].Y, (float)A[i].Z));
                var convertK = new Point3D(k.X, k.Y, k.Z);
                var error    = B[i] - convertK;
                sum += Vector3D.DotProduct(error, error);
            }

            // Root mean square error over all point pairs
            var err = Math.Sqrt(sum / A.Count);

            if (err > 0.01)
            {
                Log.WriteLog("Error deviation is too high: " + err);
            }
            else
            {
                Log.WriteLog("Error deviation is acceptable: " + err);
            }

            return err;
        }
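
As a quick sanity check, ComputeRMSE can be called with two identical feature clouds and an identity transform, in which case the reported error should be zero. The sketch below only illustrates that call; the helper name CheckRmseOnIdenticalClouds and the hard-coded points are placeholders, not part of the original project.

        // Minimal usage sketch (helper name and points are illustrative only)
        private void CheckRmseOnIdenticalClouds()
        {
            var cloud = new Point3DCollection
            {
                new Point3D(-0.8, 0, 0),
                new Point3D(-1.0, 0, 0),
                new Point3D(-0.6, 0.5, 0)
            };

            var identity = new pointmatcher.net.EuclideanTransform
            {
                translation = System.Numerics.Vector3.Zero,
                rotation    = System.Numerics.Quaternion.Identity
            };

            // Identical clouds and an identity transform should yield an RMSE of ~0,
            // so the "acceptable" branch of the log output is expected.
            var rmse = ComputeRMSE(cloud, cloud, identity);
        }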
Code example #2
        private void Init()
        {
            InitializeComponent();
            Log.InitLog(textBox, label_Cycle);


            // Initialize images
            _processingStage = new ProcessingStage(
                label_Status,
                new BitmapImage(new Uri("pack://application:,,,/3DReconstructionWPF;component/Assets/Images/icons8-crossmark.png")),
                new BitmapImage(new Uri("pack://application:,,,/3DReconstructionWPF;component/Assets/Images/icons8-checkmark.png")),
                image_trackedFeature, image_rgbColor, image_depth);

            _renderer = new Renderer(group);
            _pcv      = new PointCloudView(_renderer);

            _sensor = KinectSensor.GetDefault();

            // Start ICP from the identity transformation (no translation, identity rotation).
            // The identity quaternion is equivalent to normalizing a quaternion created
            // from the 4x4 identity rotation matrix.
            _initialTransformation = new pointmatcher.net.EuclideanTransform
            {
                translation = System.Numerics.Vector3.Zero,
                rotation    = System.Numerics.Quaternion.Identity
            };

            _icpData            = new ICP.ICPData(null, _initialTransformation);
            _icp                = new ICP();
            label_Cycle.Content = "cycle: " + _cycleRuns;

            if (_sensor != null && _sensor.IsOpen && _sensor.IsAvailable)
            {
                Log.WriteLog("Kinect capture data available!");
            }

            // Init filters
            FilterGroup.InitFilters();
        }
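
Init() only logs the availability message when the Kinect sensor is already open and delivering data; whether KinectSensor.Open() is called elsewhere in the project is not visible in this excerpt. Assuming it is not, a variant that opens the default sensor first could look like this sketch:

            // Sketch of an alternative sensor setup (assumes Open() is not called elsewhere)
            _sensor = KinectSensor.GetDefault();
            if (_sensor != null && !_sensor.IsOpen)
            {
                // IsAvailable turns true asynchronously once the sensor starts delivering data
                _sensor.Open();
            }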
Code example #3
        private void ScanReading_Click(object sender, RoutedEventArgs e)
        {
            // for intersection testing
            //_displayPointCloud = _renderer.ReadData();
            //_rgbv._bvh = BVH.InitBVH(_displayPointCloud);

            // Hard-coded test points (point4 is only used by the commented-out block below)
            var point1 = new Point3D(-0.8, 0, 0);
            var point2 = new Point3D(-1.0, 0, 0);
            var point3 = new Point3D(-0.6, 0.5, 0);
            var point4 = new Point3D(0.8, -0.5, 0);

            var pcReference = new Point3DCollection
            {
                point1,
                point2,
                point3
            };


            Log.WriteLog("--------------------");

            /*
             * var rotationAngle = 0.707106781187f;
             *
             * Matrix3D m = new Matrix3D(
             * rotationAngle, 0, rotationAngle, 0,
             * 0, 1, 0, 0,
             * -rotationAngle, 0, rotationAngle, 0,
             * 1, 0, 0, 1);
             *
             * // Transform the thumb according to m
             * _thumbReading = m.Transform(_readingFeatures[0]);
             * point1 = m.Transform(point1);
             * point2 = m.Transform(point2);
             * point3 = m.Transform(point3);
             * point4 = m.Transform(point4);
             *
             * var pcReading = new Point3DCollection
             * {
             *  point1,
             *  point2,
             *  point3
             * };
             */

            var depthData = _pcv.GetDepthDataFromLatestFrame();

            _reading         = depthData.Item1; // all points
            _readingFeatures = depthData.Item2; // only feature points [4]

            // Estimate the initial rigid transformation from the matched feature points
            _initialTransformation = Util.ComputeInitialTransformation(_readingFeatures, _referenceFeatures);
            // Apply it to the full reading cloud
            _reading = Parser3DPoint.FromDataPointsToPoint3DCollection(ICP.ApplyTransformation(_initialTransformation, Parser3DPoint.FromPoint3DToDataPoints(_reading)));

            ComputeRMSE(_referenceFeatures, _readingFeatures, _initialTransformation);

            // Render reference features (pink) and untransformed reading features (yellow-green)
            _renderer.CreatePointCloud(_referenceFeatures, Brushes.Pink, false, 0.0125f);
            _renderer.CreatePointCloud(_readingFeatures, Brushes.YellowGreen, false, 0.0125f);

            // Transform the reading features and render them again (violet)
            _readingFeatures = Parser3DPoint.FromDataPointsToPoint3DCollection(ICP.ApplyTransformation(_initialTransformation, Parser3DPoint.FromPoint3DToDataPoints(_readingFeatures)));
            _renderer.CreatePointCloud(_readingFeatures, Brushes.Violet, false, 0.0125f);

            // Render the full transformed reading cloud (blue-violet)
            _renderer.CreatePointCloud(_reading, Brushes.BlueViolet, false, 0.0025f);

            // _renderer.CreatePointCloud(_reading, Brushes.Violet,false);

            _cycleRuns++;
            label_Cycle.Content = "cycle: " + _cycleRuns;
        }