Example #1
0
        /// <summary>
        /// Creates a mesh from the current volume and tries to save it to a file
        /// as a binary STL, with the target path chosen by the user in a save dialog.
        /// </summary>
        /// <param name="volume">the volume to export</param>
        /// <param name="pkdp">the data package the mesh originated from (used to build the file name)</param>
        /// <param name="flipAxes">should the Y and Z axes be flipped (negated) on output?</param>
        static void exportMesh(ColorReconstruction volume, KinectDataPackage pkdp, bool flipAxes)
        {
            ColorMesh mesh = volume.CalculateMesh(1);
            if (null == mesh)
            {
                // Nothing to export; bail out before bothering the user with a dialog.
                return;
            }

            Microsoft.Win32.SaveFileDialog dialog = new Microsoft.Win32.SaveFileDialog();
            dialog.FileName = "KinectFusionMesh_" + pkdp.usedConfig.name + DateTime.UtcNow.ToShortDateString() + ".stl";
            dialog.Filter   = "STL Mesh Files|*.stl|All Files|*.*";

            if (true == dialog.ShowDialog())
            {
                using (BinaryWriter writer = new BinaryWriter(dialog.OpenFile()))
                {
                    var vertices = mesh.GetVertices();
                    var normals  = mesh.GetNormals();
                    var indices  = mesh.GetTriangleIndexes();

                    // Check mesh arguments: vertices must form whole triangles and
                    // correspond one-to-one with the triangle index list.
                    if (0 == vertices.Count || 0 != vertices.Count % 3 || vertices.Count != indices.Count)
                    {
                        throw new InvalidOperationException("Invalid Mesh Arguments");
                    }

                    // Binary STL layout: 80-byte header, triangle count, then one
                    // 50-byte record (normal, 3 vertices, attribute) per triangle.
                    char[] header = new char[80];
                    writer.Write(header);

                    // Write number of triangles
                    int triangles = vertices.Count / 3;
                    writer.Write(triangles);

                    // Sequentially write the normal, 3 vertices of the triangle and attribute, for each triangle
                    for (int i = 0; i < triangles; i++)
                    {
                        // Write normal (one normal per triangle; X is never flipped)
                        var normal = normals[i * 3];
                        writer.Write(normal.X);
                        writer.Write(flipAxes ? -normal.Y : normal.Y);
                        writer.Write(flipAxes ? -normal.Z : normal.Z);

                        // Write vertices
                        for (int j = 0; j < 3; j++)
                        {
                            var vertex = vertices[(i * 3) + j];
                            writer.Write(vertex.X);
                            writer.Write(flipAxes ? -vertex.Y : vertex.Y);
                            writer.Write(flipAxes ? -vertex.Z : vertex.Z);
                        }

                        // STL attribute byte count; zero by convention.
                        ushort attribute = 0;
                        writer.Write(attribute);
                    }
                }
            }
        }
Example #2
0
        /// <summary>
        /// Constructor: builds a hashset of points extracted from the volume's mesh.
        /// </summary>
        /// <param name="pVolume">the current reconstruction volume</param>
        public PointCloud(ColorReconstruction pVolume)
        {
            // Export a mesh from the reconstruction volume; its vertex list is the
            // raw point cloud we want.
            ColorMesh mesh = pVolume.CalculateMesh(1);
            IReadOnlyCollection <Vector3> meshVertices = mesh.GetVertices();

            // Deduplicate the vertices into the hashset representation.
            pointcloud_hs = createHashset(meshVertices);
        }
Example #3
0
        /// <summary>
        /// Constructor: wires the processor to the engine and allocates the
        /// point-cloud and depth frames it works on.
        /// </summary>
        /// <param name="e">owning engine providing the volume and resampler</param>
        public PointCloudProcessor(Engine e)
        {
            this.engine = e;
            this.volume = e.FusionVolume.Reconstruction;

            // Downsampled frames use the resampler's reduced resolution.
            var resampler = e.Resampler;
            int dsWidth  = resampler.DownsampledWidth;
            int dsHeight = resampler.DownsampledHeight;
            DownsampledSmoothDepthFloatFrame  = new FusionFloatImageFrame(dsWidth, dsHeight);
            DownsampledRaycastPointCloudFrame = new FusionPointCloudImageFrame(dsWidth, dsHeight);
            DownsampledDepthPointCloudFrame   = new FusionPointCloudImageFrame(dsWidth, dsHeight);

            // Full-size frames use the configured Kinect depth resolution.
            DepthPointCloudFrame   = new FusionPointCloudImageFrame(KinectSettings.DEPTH_WIDTH, KinectSettings.DEPTH_HEIGHT);
            RaycastPointCloudFrame = new FusionPointCloudImageFrame(KinectSettings.DEPTH_WIDTH, KinectSettings.DEPTH_HEIGHT);
        }
Example #4
0
        /// <summary>
        /// Constructor: creates the reconstruction volume from the static volume
        /// parameters, sets up rendering/reset helpers, and resets the volume.
        /// </summary>
        /// <param name="e">owning engine, stored for later use</param>
        /// <param name="startingWorldToCameraTx">initial world-to-camera transform for the reconstruction</param>
        public FusionVolume(Engine e, Matrix4 startingWorldToCameraTx)
        {
            this.Engine = e;
            ReconstructionParameters volParam = new ReconstructionParameters(FusionVolume.VoxelsPerMeter, FusionVolume.VoxelsX, FusionVolume.VoxelsY, FusionVolume.VoxelsZ);

            WorldToCameraTransform             = startingWorldToCameraTx;
            this.Reconstruction                = ColorReconstruction.FusionCreateReconstruction(volParam, ProcessorType, DeviceToUse, WorldToCameraTransform);
            // Remember the volume's default transform so later resets can restore it.
            this.DefaultWorldToVolumeTransform = this.Reconstruction.GetCurrentWorldToVolumeTransform();
            Renderer = new VolumeRenderer(e);
            resetter = new VolumeResetter();
            // NOTE(review): magic constants — presumably depth thresholds in meters; confirm
            // against ResetReconstruction's parameter names.
            ResetReconstruction(0.4f, 0.10f);
            // this.resetter.ResetReconstruction(this, startingWorldToCameraTx);
        }
        /// <summary>
        /// Constructs and prepares the ColorReconstruction for data input
        /// </summary>
        private void RecreateReconstruction()
        {
            ReconstructionParameters reconParams = new ReconstructionParameters(voxelsPerMeter, voxelsX, voxelsY, voxelsZ);

            // Start with the camera at the world origin.
            worldToCameraTransform = Matrix4.Identity;
            ReconstructionProcessor processor = ReconstructionProcessor.Amp;

            volume = ColorReconstruction.FusionCreateReconstruction(reconParams, processor, -1, worldToCameraTransform);
            defaultWorldToVolumeTransform = volume.GetCurrentWorldToVolumeTransform();
            ResetReconstruction();

            // Build the world-to-BGR transform: scale each axis by voxels-per-meter
            // over the axis resolution, with a 0.5 shift on the X/Y channels.
            worldToBGRTransform = Matrix4.Identity;
            worldToBGRTransform.M11 = voxelsPerMeter / voxelsX;
            worldToBGRTransform.M22 = voxelsPerMeter / voxelsY;
            worldToBGRTransform.M33 = voxelsPerMeter / voxelsZ;
            worldToBGRTransform.M41 = 0.5f;
            worldToBGRTransform.M42 = 0.5f;
            worldToBGRTransform.M44 = 1.0f;
        }
Example #6
0
        /// <summary>
        /// Constructor: creates a new reconstruction volume from the server
        /// configuration and allocates the depth frame buffers.
        /// </summary>
        internal DataIntegrationReconstruction()
        {
            this.canWorkLock = new Object();

            // Shorthand for the Kinect Fusion section of the server configuration.
            var fusionConfig = Config.ServerConfigManager._ServerConfigObject.serverKinectFusionConfig;

            this.worldToCameraTransform = Matrix4.Identity;
            this.volume = ColorReconstruction.FusionCreateReconstruction(new ReconstructionParameters(
                                                                             fusionConfig.VoxelsPerMeter,
                                                                             fusionConfig.VoxelResolutionX,
                                                                             fusionConfig.VoxelResolutionY,
                                                                             fusionConfig.VoxelResolutionZ),
                                                                         fusionConfig.ProcessorType,
                                                                         fusionConfig.DeviceToUse,
                                                                         worldToCameraTransform);
            this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

            // Pixel buffers sized to the configured depth resolution.
            depthFloatFrame = new FusionFloatImageFrame(fusionConfig.depthWidth, fusionConfig.depthHeight);
            int pixelCount = depthFloatFrame.Width * depthFloatFrame.Height;
            depthFloatFrameDepthPixels = new float[pixelCount];
            depthFloatFramePixelsArgb  = new int[pixelCount];
        }
Example #7
0
        /// <summary>
        /// Sets up the Kinect Fusion pipeline: volume parameters, the
        /// reconstruction object, and all image frames at depth resolution.
        /// </summary>
        void InitializeFusion()
        {
            // Reconstruction parameters (volume density and per-axis resolution).
            const float voxelsPerMeter = 256;
            const int resolutionX = 512;
            const int resolutionY = 384;
            const int resolutionZ = 512;

            reconstructionParameters = new ReconstructionParameters(voxelsPerMeter, resolutionX, resolutionY, resolutionZ);

            // Use the initial camera pose as the world origin.
            worldToCameraTransform = Matrix4.Identity;

            // Create the Fusion reconstruction object (AMP processor).
            reconstruction = ColorReconstruction.FusionCreateReconstruction(reconstructionParameters, ReconstructionProcessor.Amp, -1, worldToCameraTransform);

            // Create the Fusion image frames, all at the depth stream's resolution.
            cameraParameters = CameraParameters.Defaults;
            depthImageFrame       = new FusionFloatImageFrame(depthWidth, depthHeight, cameraParameters);
            smoothDepthImageFrame = new FusionFloatImageFrame(depthWidth, depthHeight, cameraParameters);
            colorImageFrame       = new FusionColorImageFrame(depthWidth, depthHeight, cameraParameters);
            pointCloudImageFrame  = new FusionPointCloudImageFrame(depthWidth, depthHeight, cameraParameters);
            surfaceImageFrame     = new FusionColorImageFrame(depthWidth, depthHeight, cameraParameters);
        }
Example #8
0
        /// <summary>
        /// Constructor: creates the reconstruction volume, all full-resolution and
        /// downsampled processing frames, the camera pose finder, and starts the
        /// background worker thread that consumes frames from the scanner.
        /// </summary>
        /// <param name="source">scanner providing depth/color frames; must not be null</param>
        /// <param name="dispatcher">dispatcher stored for UI marshaling (not validated here)</param>
        /// <exception cref="ArgumentNullException">if source is null</exception>
        public VolumeBuilder(Scanner source, Dispatcher dispatcher)
        {
            if (source == null)
            {
                throw new ArgumentNullException("source");
            }

            this.source     = source;
            this.dispatcher = dispatcher;

            // Set the world-view transform to identity, so the world origin is the initial camera location.
            this.worldToCameraTransform = Matrix4.Identity;

            // Map world X axis to blue channel, Y axis to green channel and Z axis to red channel,
            // normalizing each to the range [0, 1]. We also add a shift of 0.5 to both X,Y channels
            // as the world origin starts located at the center of the front face of the volume,
            // hence we need to map negative x,y world vertex locations to positive color values.
            this.worldToBGRTransform     = Matrix4.Identity;
            this.worldToBGRTransform.M11 = settings.VoxelsPerMeter / settings.VoxelsX;
            this.worldToBGRTransform.M22 = settings.VoxelsPerMeter / settings.VoxelsY;
            this.worldToBGRTransform.M33 = settings.VoxelsPerMeter / settings.VoxelsZ;
            this.worldToBGRTransform.M41 = 0.5f;
            this.worldToBGRTransform.M42 = 0.5f;
            this.worldToBGRTransform.M44 = 1.0f;

            var volumeParameters = new ReconstructionParameters(settings.VoxelsPerMeter, settings.VoxelsX, settings.VoxelsY, settings.VoxelsZ);

            this.volume = ColorReconstruction.FusionCreateReconstruction(volumeParameters, ReconstructionProcessor.Amp, -1, this.worldToCameraTransform);

            var depthWidth  = this.source.Frame.DepthWidth;
            var depthHeight = this.source.Frame.DepthHeight;
            var depthSize   = depthWidth * depthHeight;

            // Full-resolution working frames, all sized to the depth stream.
            this.depthFloatFrame                 = new FusionFloatImageFrame(depthWidth, depthHeight);
            this.smoothDepthFloatFrame           = new FusionFloatImageFrame(depthWidth, depthHeight);
            this.resampledColorFrame             = new FusionColorImageFrame(depthWidth, depthHeight);
            this.resampledColorFrameDepthAligned = new FusionColorImageFrame(depthWidth, depthHeight);
            this.deltaFromReferenceFrame         = new FusionFloatImageFrame(depthWidth, depthHeight);
            this.shadedSurfaceFrame              = new FusionColorImageFrame(depthWidth, depthHeight);
            this.raycastPointCloudFrame          = new FusionPointCloudImageFrame(depthWidth, depthHeight);
            this.depthPointCloudFrame            = new FusionPointCloudImageFrame(depthWidth, depthHeight);

            // Downsampled frames for the cheaper tracking/alignment passes.
            var downsampledDepthWidth  = depthWidth / settings.DownsampleFactor;
            var downsampledDepthHeight = depthHeight / settings.DownsampleFactor;
            var downsampledDepthSize   = downsampledDepthWidth * downsampledDepthHeight;

            this.downsampledDepthFloatFrame                   = new FusionFloatImageFrame(downsampledDepthWidth, downsampledDepthHeight);
            this.downsampledSmoothDepthFloatFrame             = new FusionFloatImageFrame(downsampledDepthWidth, downsampledDepthHeight);
            this.downsampledRaycastPointCloudFrame            = new FusionPointCloudImageFrame(downsampledDepthWidth, downsampledDepthHeight);
            this.downsampledDepthPointCloudFrame              = new FusionPointCloudImageFrame(downsampledDepthWidth, downsampledDepthHeight);
            this.downsampledDeltaFromReferenceFrameColorFrame = new FusionColorImageFrame(downsampledDepthWidth, downsampledDepthHeight);

            // Raw pixel buffers matching the frames above.
            this.resampledColorData   = new int[depthSize];
            this.downsampledDepthData = new float[downsampledDepthSize];
            this.downsampledDeltaFromReferenceColorPixels = new int[downsampledDepthSize];
            this.deltaFromReferenceFramePixelsArgb        = new int[depthSize];
            this.shadedSurfaceFramePixelsArgb             = new int[this.shadedSurfaceFrame.PixelDataLength];

            // Remember the volume's default transform so resets can restore it.
            this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

            this.volumeBitmap = new WriteableBitmap(depthWidth, depthHeight, settings.DefaultSystemDPI, settings.DefaultSystemDPI, PixelFormats.Bgr32, null);

            // Create a camera pose finder with default parameters
            this.cameraPoseFinder = CameraPoseFinder.FusionCreateCameraPoseFinder(CameraPoseFinderParameters.Defaults);

            // Start processing last, once every buffer above is allocated.
            this.workerThread = new Thread(WorkerThreadProc);
            this.workerThread.Start();
            this.source.Frame.OnDataUpdate += OnFrameDataUpdate;
        }
Example #9
0
        /// <summary>
        /// Per-frame plugin evaluation: reads the voxel configuration pins, lazily
        /// creates the output pin resources, (re)initializes the Kinect Fusion
        /// reconstruction when the runtime connection changes, and honors the
        /// reset pin.
        /// </summary>
        /// <param name="SpreadMax">spread count supplied by the host (unused here)</param>
        public void Evaluate(int SpreadMax)
        {
            // Pull the current voxel configuration from the input pins.
            this.VoxelResolutionX = this.FInVX[0];
            this.VoxelResolutionY = this.FInVY[0];
            this.VoxelResolutionZ = this.FInVZ[0];
            this.VoxelsPerMeter   = this.FInVPM[0];

            // Lazily create the output pin resources on first evaluation.
            if (this.FTextureOutput[0] == null)
            {
                this.FTextureOutput[0] = new DX11Resource <DX11DynamicTexture2D>();
            }
            if (this.FPCOut[0] == null)
            {
                this.FPCOut[0] = new DX11Resource <IDX11ReadableStructureBuffer>();
            }
            if (this.FGeomOut[0] == null)
            {
                this.FGeomOut[0] = new DX11Resource <DX11IndexedGeometry>();
            }

            if (this.FOutVoxels[0] == null)
            {
                this.FOutVoxels[0] = new DX11Resource <IDX11ReadableStructureBuffer>();
            }

            // Export pin: drop the current geometry so it gets rebuilt.
            if (this.FInExport[0])
            {
                this.FGeomOut[0].Dispose(); this.FGeomOut[0] = new DX11Resource <DX11IndexedGeometry>();
            }

            // The runtime connection changed since last frame: rebuild the volume.
            if (this.FInvalidateConnect)
            {
                this.FInvalidateConnect = false;

                if (this.FInRuntime.PluginIO.IsConnected)
                {
                    // Hook the depth stream of the newly connected runtime.
                    this.runtime = this.FInRuntime[0];
                    this.runtime.DepthFrameReady += this.runtime_DepthFrameReady;

                    var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);
                    this.worldToCameraTransform = Matrix4.Identity;

                    //this.volume = Reconstruction.FusionCreateReconstruction(volParam, ProcessorType, 0, this.worldToCameraTransform);
                    this.colorVolume = ColorReconstruction.FusionCreateReconstruction(volParam, ProcessorType, 0, this.worldToCameraTransform);

                    //this.volume.

                    /*FusionPointCloudImageFrame pc;
                     * pc.*/

                    // Remember the default transform so resets can restore it.
                    this.defaultWorldToVolumeTransform = this.colorVolume.GetCurrentWorldToVolumeTransform();

                    // Depth frames generated from the depth input
                    this.depthFloatBuffer = new FusionFloatImageFrame(width, height);

                    // Point cloud frames generated from the depth float input
                    this.pointCloudBuffer = new FusionPointCloudImageFrame(width, height);

                    // Create images to raycast the Reconstruction Volume
                    this.shadedSurfaceColorFrame = new FusionColorImageFrame(width, height);

                    this.ResetReconstruction();
                }
            }

            // Reset pin: clear the reconstruction on demand.
            if (this.runtime != null)
            {
                bool needreset = this.FInReset[0];

                if (needreset)
                {
                    this.ResetReconstruction();
                }
            }
        }
Example #10
0
        /// <summary>
        /// Initializes Kinect Fusion: builds the reconstruction parameters and
        /// object, then allocates every image frame at depth resolution.
        /// </summary>
        void InitializeFusion()
        {
            // Reconstruction parameters.
            const float vpm = 256;   // voxels per meter
            const int nx = 512;
            const int ny = 384;
            const int nz = 512;
            reconstructionParameters = new ReconstructionParameters( vpm, nx, ny, nz );

            // Set the initial camera pose as the world coordinate origin.
            worldToCameraTransform = Matrix4.Identity;

            // Create the Fusion reconstruction object.
            reconstruction = ColorReconstruction.FusionCreateReconstruction( reconstructionParameters, ReconstructionProcessor.Amp, -1, worldToCameraTransform );

            // Create the Fusion image frames.
            cameraParameters = CameraParameters.Defaults;
            depthImageFrame = new FusionFloatImageFrame( depthWidth, depthHeight, cameraParameters );
            smoothDepthImageFrame = new FusionFloatImageFrame( depthWidth, depthHeight, cameraParameters );
            colorImageFrame = new FusionColorImageFrame( depthWidth, depthHeight, cameraParameters );
            pointCloudImageFrame = new FusionPointCloudImageFrame( depthWidth, depthHeight, cameraParameters );
            surfaceImageFrame = new FusionColorImageFrame( depthWidth, depthHeight, cameraParameters );
        }
Example #11
0
        /// <summary>
        /// Re-create the reconstruction object
        /// </summary>
        /// <param name="vol">the fusion volume whose reconstruction is rebuilt in place</param>
        /// <param name="startingWorldToCameraTx">initial world-to-camera transform for the new volume</param>
        /// <returns>Indicate success or failure</returns>
        public bool RecreateReconstruction(FusionVolume vol, Matrix4 startingWorldToCameraTx)
        {
            lock (this.volumeLock)
            {
                // Release the old volume before allocating a replacement.
                if (null != vol.Reconstruction)
                {
                    vol.Reconstruction.Dispose();
                    vol.Reconstruction = null;
                }

                try
                {
                    ReconstructionParameters volParam = new ReconstructionParameters(FusionVolume.VoxelsPerMeter, FusionVolume.VoxelsX, FusionVolume.VoxelsY, FusionVolume.VoxelsZ);

                    // Set the world-view transform to identity, so the world origin is the initial camera location.
                    vol.WorldToCameraTransform = startingWorldToCameraTx;

                    vol.Reconstruction = ColorReconstruction.FusionCreateReconstruction(volParam, FusionVolume.ProcessorType, FusionVolume.DeviceToUse, vol.WorldToCameraTransform);

                    vol.DefaultWorldToVolumeTransform = vol.Reconstruction.GetCurrentWorldToVolumeTransform();

                    if (VolumeResetter.TranslateResetPoseByMinDepthThreshold)
                    {
                        ResetReconstruction(vol, startingWorldToCameraTx);
                    }
                    else
                    {
                        vol.Engine.CameraTracker.ResetTracking();
                        vol.Engine.ColorProcessor.ResetColorImage();
                    }

                    vol.Renderer.ResetWorldToBGR();

                    if (vol.Engine.CubeDrawer != null)
                    {
                        vol.Engine.CubeDrawer.UpdateVolumeCube();
                    }

                    vol.Renderer.ViewChanged = true;

                    return true;
                }
                catch (ArgumentException)
                {
                    vol.Reconstruction = null;
                    logger.Log(LogLevel.Error, "Volume resolution not appropriate");
                }
                catch (InvalidOperationException ex)
                {
                    vol.Reconstruction = null;
                    logger.Log(LogLevel.Error, ex);
                }
                catch (DllNotFoundException)
                {
                    vol.Reconstruction = null;
                    logger.Log(LogLevel.Error, "Missing Dll prerequisite for volume reconstruction");
                }
                catch (OutOfMemoryException)
                {
                    vol.Reconstruction = null;
                    logger.Log(LogLevel.Error, "Out of memory when recreating volume");
                }

                // Any handled failure leaves vol.Reconstruction null and reports false.
                return false;
            }
        }