Example #1
        //DEBUG
        private void _ButtonDebugLoadMeshFromFileAndSetReference_Click(object sender, RoutedEventArgs e)
        {
            this._ButtonDebugLoadMeshFromFileAndSetReference.IsEnabled = false;
            Task t = Task.Factory.StartNew(() =>
            {
                Microsoft.Win32.OpenFileDialog dialog = new Microsoft.Win32.OpenFileDialog();
                dialog.FileName = "";
                dialog.Title    = "Load Mesh/es to set as reference";
                dialog.Filter   = "STL Mesh Files|*.stl|All Files|*.*";

                int loop = 0;
                while (true == dialog.ShowDialog())
                {
                    loop++;

                    if (loop == 1)
                    {
                        _DataManager.DebugSetReferenceCloud(new DataIntegration.PointCloud(dialog.FileName));
                        _DataManager.currentReferenceCloud.downsamplePointcloud(Config.ServerConfigManager._ServerConfigObject.serverAlgorithmConfig.downsamplingFactor);
                    }
                    else
                    {
                        DataIntegration.PointCloud pcl = new DataIntegration.PointCloud(dialog.FileName);
                        pcl.downsamplePointcloud(Config.ServerConfigManager._ServerConfigObject.serverAlgorithmConfig.downsamplingFactor);
                        double[,] transformationMatrix = new double[4, 4];
                        this.Dispatcher.Invoke(() =>
                        {
                            DebugTransformationMatrix traWin = new DebugTransformationMatrix();
                            traWin.ShowDialog();
                            transformationMatrix = traWin.transformationMatrix;
                        });
                        _DataManager.currentReferenceCloud.addPointcloudToReference(pcl,
                                                                                    transformationMatrix,
                                                                                    Config.ServerConfigManager._ServerConfigObject.serverAlgorithmConfig.ICP_perform,
                                                                                    Config.ServerConfigManager._ServerConfigObject.serverAlgorithmConfig.ICP_indis);
                    }
                }
            });

            t.ContinueWith((continuation) =>
            {
                this.Dispatcher.Invoke(() =>
                {
                    UpdateClientConnectionSatus();
                    this._DataManager_OnNewPointPicturesEvent(_DataManager.currentReferenceCloud.pictures);
                    this._ButtonSelectPlanes.IsEnabled  = true;
                    this._ButtonEuclidianScan.IsEnabled = true;
                    this._ButtonDebugLoadMeshFromFileAndSetReference.IsEnabled = true;
                });
            });
        }
Example #2
        //DEBUG
        /// <summary>
        /// Debug handler: lets the user pick an STL mesh and feeds it into the scan
        /// processing pipeline as if it had just been scanned.
        /// </summary>
        /// <param name="sender">the button that raised the event</param>
        /// <param name="e">routed event arguments</param>
        private void _ButtonDebugLoadMeshFromFileAndScan_Click(object sender, RoutedEventArgs e)
        {
            Microsoft.Win32.OpenFileDialog dialog = new Microsoft.Win32.OpenFileDialog();
            dialog.Title    = "Load Mesh to scan";
            dialog.FileName = "";
            dialog.Filter   = "STL Mesh Files|*.stl|All Files|*.*";

            if (true == dialog.ShowDialog())
            {
                Task t = new Task(() =>
                {
                    //load, downsample and hand the cloud to the scan-process pipeline
                    DataIntegration.PointCloud pcl = new DataIntegration.PointCloud(dialog.FileName);
                    pcl.downsamplePointcloud(Config.ServerConfigManager._ServerConfigObject.serverAlgorithmConfig.downsamplingFactor);
                    _DataManager._DataIntegrationManager_OnNewScanProcessEvent(pcl);
                });
                //BUGFIX: the continuation runs on a thread-pool thread; the sibling
                //handler dispatches UpdateClientConnectionSatus onto the UI thread
                //(presumably it touches UI state), so do the same here.
                t.ContinueWith((continuation) => this.Dispatcher.Invoke(() => UpdateClientConnectionSatus()));
                t.Start();
            }
        }
Example #3
        /// <summary>
        /// creates three bitmaps based on a point cloud
        /// </summary>
        /// <param name="pInputCloud">the point cloud</param>
        /// <param name="pHeight">the height of the bitmap</param>
        /// <param name="pWidth">the width of the bitmap</param>
        /// <returns>the picture package containing top, bottom and front view</returns>
        public static PointCloudPicturePackage createPointCloudPicturePackageFromPointCloud(PointCloud pInputCloud, int pHeight, int pWidth)
        {
            //create data objects
            PointCloudPicturePackage resPack = new PointCloudPicturePackage();

            resPack.sideview   = new WriteableBitmap(pWidth, pHeight, 96.0, 96.0, System.Windows.Media.PixelFormats.Pbgra32, null);
            resPack.bottomview = new WriteableBitmap(pWidth, pHeight, 96.0, 96.0, System.Windows.Media.PixelFormats.Pbgra32, null);
            resPack.frontview  = new WriteableBitmap(pWidth, pHeight, 96.0, 96.0, System.Windows.Media.PixelFormats.Pbgra32, null);

            //clear bitmap and set initial color value
            resPack.sideview.Clear(System.Windows.Media.Colors.White);
            resPack.bottomview.Clear(System.Windows.Media.Colors.White);
            resPack.frontview.Clear(System.Windows.Media.Colors.White);

            //find absolute borders for point cloud
            PointCloudBorderPackage pcbp = findBordersAbsolute(pInputCloud);

            resPack.borders = pcbp;

            //BUGFIX: the previous norm (v + |min|) / (|max| + |min|) is only correct
            //when min <= 0 <= max; (v - min) / (max - min) is correct for any sign of
            //the borders. Precompute the per-axis extents and guard degenerate (flat)
            //axes against division by zero.
            float xrange = pcbp.xmax - pcbp.xmin;
            float yrange = pcbp.ymax - pcbp.ymin;
            float zrange = pcbp.zmax - pcbp.zmin;

            //draw stuff
            foreach (Point p in pInputCloud.pointcloud_hs)
            {
                //calculate relative location by norming: 0 <= x,y,z <= 1
                //(a flat axis maps every point to the center of that axis)
                float x, y, z;
                x = (xrange > 0) ? (p.point.X - pcbp.xmin) / xrange : 0.5f;
                y = (yrange > 0) ? (p.point.Y - pcbp.ymin) / yrange : 0.5f;
                z = (zrange > 0) ? (p.point.Z - pcbp.zmin) / zrange : 0.5f;

                //skip points that would land outside the bitmap (>= 1 would index
                //pWidth/pHeight; < 0 would index negative pixels)
                if (!(x >= 1 || y >= 1 || z >= 1 || x < 0 || y < 0 || z < 0))
                {
                    //draw picture: x,y for front; x,z for bottom; z,y for side;
                    resPack.frontview.SetPixel((int)(x * pWidth), (int)(y * pHeight), System.Windows.Media.Colors.DarkRed);
                    resPack.bottomview.SetPixel((int)(x * pWidth), (int)(z * pHeight), System.Windows.Media.Colors.DarkRed);
                    resPack.sideview.SetPixel((int)(z * pWidth), (int)(y * pHeight), System.Windows.Media.Colors.DarkRed);
                }
            }

            //freeze so the bitmaps can be handed across threads safely
            resPack.bottomview.Freeze(); resPack.frontview.Freeze(); resPack.sideview.Freeze();
            return(resPack);
        }
Example #4
 /// <summary>
 /// adds a point cloud to the existing one
 /// </summary>
 /// <param name="addingCloud">the point cloud to be added</param>
 /// <param name="transformationMatrix">4x4 initial transformation applied when integrating the added cloud</param>
 /// <param name="useICP">whether to refine the alignment with ICP — presumably after the initial transformation; confirm in PointCloudIntegration</param>
 /// <param name="inlierDistance">ICP inlier distance threshold — TODO confirm units in PointCloudIntegration</param>
 public void addPointcloudToReference(PointCloud addingCloud, double[,] transformationMatrix, bool useICP, int inlierDistance)
 {
     //integrate into this cloud, then reset lazily-built derived objects so they
     //are rebuilt from the updated data on next access
     PointCloudIntegration.integratePointClouds(this, addingCloud, transformationMatrix, useICP, inlierDistance);
     resetLazyLoadingObjectsAfterUpdate();
 }
Example #5
        /// <summary>
        /// processes the depth data package into the kinect fusion volume
        /// </summary>
        /// <param name="pKdp">the data package</param>
        /// <param name="pCancelToken">token used to abort the integration loop between frames</param>
        void processDepthData(KinectDataPackage pKdp, System.Threading.CancellationToken pCancelToken)
        {
            lock (canWorkLock)
            {
                Log.LogManager.updateAlgorithmStatus("Kinect Fusion integration");
                this.volume.ResetReconstruction(Matrix4.Identity);

                int picturesIntegrated = 0;
                foreach (ushort[] pDepth in pKdp.rawDepthData)
                {
                    pCancelToken.ThrowIfCancellationRequested();
                    WriteableBitmap bitmap = new WriteableBitmap(this.depthFloatFrame.Width, this.depthFloatFrame.Height, 96.0, 96.0, PixelFormats.Bgr32, null);

                    //BUGFIX: the fusion image frames wrap native Kinect Fusion resources
                    //and implement IDisposable; previously they were allocated every
                    //iteration and never disposed, leaking native memory per frame.
                    using (FusionFloatImageFrame depthFloatBuffer = new FusionFloatImageFrame(this.depthFloatFrame.Width, this.depthFloatFrame.Height))
                    using (FusionPointCloudImageFrame pointCloudBuffer = new FusionPointCloudImageFrame(this.depthFloatFrame.Width, this.depthFloatFrame.Height))
                    using (FusionColorImageFrame shadedSurfaceColorFrame = new FusionColorImageFrame(this.depthFloatFrame.Width, this.depthFloatFrame.Height))
                    {
                        int[] voxelPixels = new int[this.depthFloatFrame.Width * this.depthFloatFrame.Height];

                        this.volume.DepthToDepthFloatFrame(
                            pDepth,
                            depthFloatBuffer,
                            Config.ServerConfigManager._ServerConfigObject.serverKinectFusionConfig.minDepthClip,
                            Config.ServerConfigManager._ServerConfigObject.serverKinectFusionConfig.maxDepthClip,
                            false);
                        float alignmentValue;
                        bool  trackingSucceeded = this.volume.ProcessFrame(depthFloatBuffer,
                                                                           Config.ServerConfigManager._ServerConfigObject.serverKinectFusionConfig.iterationCount,
                                                                           Config.ServerConfigManager._ServerConfigObject.serverKinectFusionConfig.integrationWeight,
                                                                           out alignmentValue,
                                                                           volume.GetCurrentWorldToCameraTransform());

                        // If camera tracking failed, no data integration or raycast for reference
                        // point cloud will have taken place, and the internal camera pose
                        // will be unchanged.
                        if (!trackingSucceeded)
                        {
                            trackingErrorCount++;
                        }
                        else
                        {
                            Matrix4 calculatedCameraPose = volume.GetCurrentWorldToCameraTransform();

                            // Set the camera pose and reset tracking errors
                            worldToCameraTransform = calculatedCameraPose;
                            trackingErrorCount     = 0;
                        }

                        // Calculate the point cloud
                        volume.CalculatePointCloud(pointCloudBuffer, worldToCameraTransform);

                        // Shade point cloud and render
                        FusionDepthProcessor.ShadePointCloud(
                            pointCloudBuffer,
                            worldToCameraTransform,
                            null,
                            shadedSurfaceColorFrame
                            );

                        shadedSurfaceColorFrame.CopyPixelDataTo(voxelPixels);

                        bitmap.WritePixels(
                            new Int32Rect(0, 0, bitmap.PixelWidth, bitmap.PixelHeight),
                            voxelPixels,
                            bitmap.PixelWidth * sizeof(int),
                            0);
                    }

                    //freeze so the bitmap can cross to the UI thread, then broadcast
                    bitmap.Freeze();
                    OnNewFusionPictureEvent.BeginInvoke(pKdp.usedConfig.ID, bitmap, null, null);

                    picturesIntegrated++;
                    Log.LogManager.writeLogDebug("[DataIntegration:Reconstruction] " + picturesIntegrated + " of " + pKdp.rawDepthData.Count + " Pictures integrated");
                }

                //if request was calibration request, export meshes
                if (pKdp.usedConfig.clientRequestObject.requestType == ClientConfigObject.RequestType.calibration)
                {
                    exportMesh(volume, pKdp, false);
                    Log.LogManager.writeLog("[DataIntegration:Reconstruction] Mesh of " + pKdp.usedConfig.name + " exported.");
                    return;
                }

                //broadcast new point cloud
                PointCloud p = new PointCloud(volume);
                p.ConfigObject = pKdp.usedConfig;
                OnNewPointCloudEvent.BeginInvoke(p, null, null);
                Log.LogManager.writeLog("[DataIntegration:Reconstruction] All pictures of " + pKdp.usedConfig.name + " integrated");
                Log.LogManager.updateAlgorithmStatus("Done");
            }
        }