Example #1
        public Report ReconstructReport()
        {
            var r = new Reconstruction(this);

            report = r.ReconstructReport();
            return(report);
        }
        protected virtual void Dispose(bool disposing)
        {
            if (!disposed)
            {
                if (depthFloatBuffer != null)
                {
                    depthFloatBuffer.Dispose();
                    depthFloatBuffer = null;
                }

                if (pointCloudBuffer != null)
                {
                    pointCloudBuffer.Dispose();
                    pointCloudBuffer = null;
                }

                if (shadedSurfaceColorFrame != null)
                {
                    shadedSurfaceColorFrame.Dispose();
                    shadedSurfaceColorFrame = null;
                }

                if (volume != null)
                {
                    volume.Dispose();
                    volume = null;
                }

                disposed = true;
            }
        }
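This is the protected half of the standard dispose pattern. A minimal sketch of the public counterpart, assuming the containing class declares IDisposable:

        public void Dispose()
        {
            Dispose(true);

            // The buffers are already released above, so skip finalization.
            GC.SuppressFinalize(this);
        }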
Example #3
        public ReconstructionController(KinectSensor sensor)
        {
            Contract.Requires(sensor != null);

            this.syncContext = SynchronizationContext.Current;
            this.sensor      = sensor;

            var rparams = new ReconstructionParameters(128, 256, 256, 256);

            reconstruction              = Reconstruction.FusionCreateReconstruction(rparams, ReconstructionProcessor.Amp, -1, worldToCameraTransform);
            worldToVolumeTransform      = reconstruction.GetCurrentWorldToVolumeTransform();
            worldToVolumeTransform.M43 -= MIN_DEPTH * rparams.VoxelsPerMeter;
            reconstruction.ResetReconstruction(worldToCameraTransform, worldToVolumeTransform);

            var depthFrameDesc = sensor.DepthFrameSource.FrameDescription;

            var totalPixels = depthFrameDesc.Width * depthFrameDesc.Height;

            rawDepthData  = new ushort[totalPixels];
            bodyIndexData = new byte[totalPixels];
            SurfaceBitmap = new ThreadSafeBitmap(depthFrameDesc.Width, depthFrameDesc.Height);

            var intrinsics = sensor.CoordinateMapper.GetDepthCameraIntrinsics();
            var cparams    = new CameraParameters(
                intrinsics.FocalLengthX / depthFrameDesc.Width,
                intrinsics.FocalLengthY / depthFrameDesc.Height,
                intrinsics.PrincipalPointX / depthFrameDesc.Width,
                intrinsics.PrincipalPointY / depthFrameDesc.Height);

            floatDepthFrame = new FusionFloatImageFrame(depthFrameDesc.Width, depthFrameDesc.Height, cparams);
            pointCloudFrame = new FusionPointCloudImageFrame(depthFrameDesc.Width, depthFrameDesc.Height, cparams);
            surfaceFrame    = new FusionColorImageFrame(depthFrameDesc.Width, depthFrameDesc.Height, cparams);
        }
        private void InitializeKinectFusion()
        {
            // Initialize Kinect Fusion
            var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);

            volume = Reconstruction.FusionCreateReconstruction(volParam, ReconstructionProcessor.Amp, -1, Matrix4.Identity);

            // Create the conversion buffers
            depthFloatBuffer        = new FusionFloatImageFrame(DepthWidth, DepthHeight);
            pointCloudBuffer        = new FusionPointCloudImageFrame(DepthWidth, DepthHeight);
            shadedSurfaceColorFrame = new FusionColorImageFrame(DepthWidth, DepthHeight);

            // Reset the reconstruction
            volume.ResetReconstruction(Matrix4.Identity);
        }
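A hypothetical call site for the controller above, assuming the Kinect for Windows v2 SDK that DepthFrameSource implies:

        // Hypothetical usage: open the default sensor and hand it to the controller;
        // SurfaceBitmap would then be bound to the UI.
        var sensor = KinectSensor.GetDefault();
        sensor.Open();
        var controller = new ReconstructionController(sensor);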
Example #5
 internal void Initialize(Reconstruction reconstruction)
 {
     this.mReconstruction = reconstruction;
     if (TrackerManager.Instance.GetTracker<SmartTerrainTracker>() != null)
     {
         if (this.mMaximumExtentEnabled)
         {
             this.mReconstruction.SetMaximumArea(this.mMaximumExtent);
         }
         if (this.mAutomaticStart)
         {
             SmartTerrainTracker tracker = TrackerManager.Instance.GetTracker<SmartTerrainTracker>();
             if (tracker != null)
             {
                 tracker.SmartTerrainBuilder.AddReconstruction(this);
             }
         }
         if (this.mNavMeshUpdates)
         {
             this.mReconstruction.StartNavMeshUpdates();
             this.mReconstruction.SetNavMeshPadding(this.mNavMeshPadding);
         }
         else
         {
             this.mReconstruction.StopNavMeshUpdates();
         }
         this.mHasInitialized = true;
         foreach (ISmartTerrainEventHandler handler in this.mSmartTerrainEventHandlers)
         {
             handler.OnInitialized(default(SmartTerrainInitializationInfo));
         }
         if (this.mOnInitialized != null)
         {
             this.mOnInitialized.InvokeWithExceptionHandling(default(SmartTerrainInitializationInfo));
             return;
         }
     }
     else
     {
         Debug.LogError("SmartTerrainTrackerBehaviour.Initialize: SmartTerrainTracker is null, aborting.");
     }
 }
Example #6
        public override void Apply(Image image)
        {
            Image original = new Image(image.GetPixels(), image.Size);

            int intensity = 6;

            //Apply Erosion
            for (int i = 0; i < intensity; i++)
            {
                image.Apply(Operation.Operations.Erosion);
            }

            //Reconstruct image
            int[,] newPixels = Reconstruction.Apply(original, image).GetPixels();

            //Update image with new pixels:
            image.SetPixels(newPixels);
        }
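Apply performs an opening-by-reconstruction: the image is eroded several times, then the eroded result (the marker) is grown back under the untouched copy (the mask). A minimal sketch of what Reconstruction.Apply may do internally, assuming a (mask, marker) argument order and raw int[,] pixels:

        // Grayscale reconstruction by dilation: dilate the marker with a 3x3
        // structuring element, clip against the mask, and repeat until stable.
        static int[,] ReconstructByDilation(int[,] mask, int[,] marker)
        {
            int h = marker.GetLength(0), w = marker.GetLength(1);
            var current = (int[,])marker.Clone();
            bool changed = true;

            while (changed)
            {
                changed = false;
                for (int y = 0; y < h; y++)
                {
                    for (int x = 0; x < w; x++)
                    {
                        int best = current[y, x];
                        for (int dy = -1; dy <= 1; dy++)
                        {
                            for (int dx = -1; dx <= 1; dx++)
                            {
                                int ny = y + dy, nx = x + dx;
                                if (ny >= 0 && ny < h && nx >= 0 && nx < w && current[ny, nx] > best)
                                {
                                    best = current[ny, nx];
                                }
                            }
                        }

                        // The mask caps growth, so the result never exceeds the original.
                        int clipped = Math.Min(best, mask[y, x]);
                        if (clipped != current[y, x])
                        {
                            current[y, x] = clipped;
                            changed = true;
                        }
                    }
                }
            }
            return current;
        }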
Example #7
        public string Get(Reconstruction recons)
        {
            string res = null;

            switch (recons)
            {
            case Reconstruction.Dienq:
                res = Dienq;
                break;

            case Reconstruction.Phuan:
                res = Phuan;
                break;

            case Reconstruction.Karlgren:
                res = Karlgren;
                break;

            case Reconstruction.Hvang:
                res = Hvang;
                break;

            case Reconstruction.Lyx:
                res = Lyx;
                break;

            case Reconstruction.Zjew:
                res = Zjew;
                break;

            default:
                res = PulleyBlank;
                break;
            }
            return(res);
        }
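The cases name competing reconstructions of Middle Chinese readings (Karlgren among them). A hypothetical call site, where entry is any object exposing this method:

        // Hypothetical: fetch the reading in Karlgren's reconstruction.
        string reading = entry.Get(Reconstruction.Karlgren);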
Example #8
    static void Main(string[] args)
    {
        var recBinPath = "/media/boris/data/panopt/tjurbacken/map/reconstruction/0/";
        var recTxtPath = "/media/boris/data/panopt/tjurbacken/DJI_0007/reconstruction_txt/";

        // var imagesPath = "/media/boris/data/panopt/tjurbacken/DJI_0007/reconstruction_txt/images.txt";
        // var imagesPathBin = "/media/boris/data/panopt/tjurbacken/DJI_0007/reconstruction/images.bin";
        //var imagesPathBin = "/media/boris/data/panopt/tjurbacken/map/reconstruction/0/images.bin";
        // var points3DPath = "/media/boris/data/panopt/tjurbacken/DJI_0007/reconstruction_txt/points3D.txt";
        // var points3DPathBin = "/media/boris/data/panopt/tjurbacken/DJI_0007/reconstruction/points3D.bin";


        var rec = new Reconstruction(recBinPath);

        foreach (var img in rec.Images())
        {
            Console.WriteLine($"img ->{img}<-");
        }

        foreach (var point in rec.Points())
        {
            Console.WriteLine($"p3d ->{point}<-");
        }
    }
Example #9
        public void Evaluate(int SpreadMax)
        {
            this.VoxelResolutionX = this.FInVX[0];
            this.VoxelResolutionY = this.FInVY[0];
            this.VoxelResolutionZ = this.FInVZ[0];
            this.VoxelsPerMeter   = this.FInVPM[0];

            if (this.FTextureOutput[0] == null)
            {
                this.FTextureOutput[0] = new DX11Resource<DX11DynamicTexture2D>();
            }
            if (this.FPCOut[0] == null)
            {
                this.FPCOut[0] = new DX11Resource<IDX11ReadableStructureBuffer>();
            }
            if (this.FGeomOut[0] == null)
            {
                this.FGeomOut[0] = new DX11Resource<DX11IndexedGeometry>();
            }

            if (this.FOutVoxels[0] == null)
            {
                this.FOutVoxels[0] = new DX11Resource<IDX11ReadableStructureBuffer>();
            }

            if (this.FInExport[0])
            {
                this.FGeomOut[0].Dispose();
                this.FGeomOut[0] = new DX11Resource<DX11IndexedGeometry>();
            }

            if (this.FInvalidateConnect)
            {
                this.FInvalidateConnect = false;

                if (this.FInRuntime.PluginIO.IsConnected)
                {
                    this.runtime = this.FInRuntime[0];
                    this.runtime.DepthFrameReady += this.runtime_DepthFrameReady;

                    // Depth frames generated from the depth input
                    this.depthFloatBuffer = new FusionFloatImageFrame(width, height);

                    // Point cloud frames generated from the depth float input
                    this.pointCloudBuffer = new FusionPointCloudImageFrame(width, height);

                    // Create images to raycast the Reconstruction Volume
                    this.shadedSurfaceColorFrame = new FusionColorImageFrame(width, height);
                }
            }

            if (this.FInVPM.IsChanged || this.FInVX.IsChanged || this.FInVY.IsChanged || this.FInVZ.IsChanged)
            {
                if (this.volume != null)
                {
                    this.volume.Dispose();
                }

                var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);
                this.worldToCameraTransform = Matrix4.Identity;

                this.volume = Reconstruction.FusionCreateReconstruction(volParam, ProcessorType, 0, this.worldToCameraTransform);

                this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

                this.ResetReconstruction();
            }

            if (this.runtime != null)
            {
                bool needreset = this.FInReset[0];

                if (needreset)
                {
                    this.ResetReconstruction();
                }
            }
        }
Example #10
        /// <summary>
        /// Execute startup tasks
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            // Check to ensure suitable DirectX11 compatible hardware exists before initializing Kinect Fusion
            try
            {
                string deviceDescription  = string.Empty;
                string deviceInstancePath = string.Empty;
                int    deviceMemory       = 0;

                FusionDepthProcessor.GetDeviceInfo(
                    ProcessorType, DeviceToUse, out deviceDescription, out deviceInstancePath, out deviceMemory);
            }
            catch (IndexOutOfRangeException)
            {
                // Thrown when index is out of range for processor type or there is no DirectX11 capable device installed.
                // As we set -1 (auto-select default) for the DeviceToUse above, this indicates that there is no DirectX11
                // capable device. The options for users in this case are to either install a DirectX11 capable device
                // (see documentation for recommended GPUs) or to switch to non-real-time CPU based reconstruction by
                // changing ProcessorType to ReconstructionProcessor.Cpu
                this.statusBarText.Text = Properties.Resources.NoDirectX11CompatibleDeviceOrInvalidDeviceIndex;
                return;
            }
            catch (DllNotFoundException)
            {
                this.statusBarText.Text = Properties.Resources.MissingPrerequisite;
                return;
            }
            catch (InvalidOperationException ex)
            {
                this.statusBarText.Text = ex.Message;
                return;
            }

            // Look through all sensors and start the first connected one.
            // This requires that a Kinect is connected at the time of app startup.
            // To make your app robust against plug/unplug,
            // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    this.sensor = potentialSensor;
                    break;
                }
            }

            if (null == this.sensor)
            {
                this.statusBarText.Text = Properties.Resources.NoKinectReady;
                return;
            }

            // Turn on the depth stream to receive depth frames
            this.sensor.DepthStream.Enable(DepthImageResolution);

            this.frameDataLength = this.sensor.DepthStream.FramePixelDataLength;

            // Create local depth pixels buffer
            this.depthImagePixels = new DepthImagePixel[this.frameDataLength];

            // Allocate space to put the color pixels we'll create
            this.colorPixels = new int[this.frameDataLength];

            // This is the bitmap we'll display on-screen
            this.colorBitmap = new WriteableBitmap(
                (int)ImageSize.Width,
                (int)ImageSize.Height,
                96.0,
                96.0,
                PixelFormats.Bgr32,
                null);

            // Set the image we display to point to the bitmap where we'll put the image data
            this.Image.Source = this.colorBitmap;

            // Add an event handler to be called whenever there is new depth frame data
            this.sensor.DepthFrameReady += this.SensorDepthFrameReady;

            var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);

            // Set the world-view transform to identity, so the world origin is the initial camera location.
            this.worldToCameraTransform = Matrix4.Identity;

            try
            {
                // This creates a volume cube with the Kinect at center of near plane, and volume directly
                // in front of Kinect.
                this.volume = Reconstruction.FusionCreateReconstruction(volParam, ProcessorType, DeviceToUse, this.worldToCameraTransform);

                this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

                if (this.translateResetPoseByMinDepthThreshold)
                {
                    // Reset the reconstruction if we need to add a custom world-volume transformation
                    this.ResetReconstruction();
                }
            }
            catch (InvalidOperationException ex)
            {
                this.statusBarText.Text = ex.Message;
                return;
            }
            catch (DllNotFoundException)
            {
                this.statusBarText.Text = Properties.Resources.MissingPrerequisite;
                return;
            }

            // Depth frames generated from the depth input
            this.depthFloatBuffer = new FusionFloatImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

            // Point cloud frames generated from the depth float input
            this.pointCloudBuffer = new FusionPointCloudImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

            // Create images to raycast the Reconstruction Volume
            this.shadedSurfaceColorFrame = new FusionColorImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

            // Start the sensor!
            try
            {
                this.sensor.Start();
            }
            catch (IOException ex)
            {
                // Device is in use
                this.sensor             = null;
                this.statusBarText.Text = ex.Message;

                return;
            }
            catch (InvalidOperationException ex)
            {
                // Device is not valid, not supported or hardware feature unavailable
                this.sensor             = null;
                this.statusBarText.Text = ex.Message;

                return;
            }

            // Set Near Mode by default
            try
            {
                this.sensor.DepthStream.Range   = DepthRange.Near;
                this.checkBoxNearMode.IsChecked = true;
            }
            catch (InvalidOperationException)
            {
                // Near mode not supported on device, silently fail during initialization
                this.checkBoxNearMode.IsEnabled = false;
            }

            // Initialize and start the FPS timer
            this.fpsTimer          = new DispatcherTimer();
            this.fpsTimer.Tick    += new EventHandler(this.FpsTimerTick);
            this.fpsTimer.Interval = new TimeSpan(0, 0, FpsInterval);

            this.fpsTimer.Start();

            this.lastFPSTimestamp = DateTime.UtcNow;
        }
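WindowLoaded calls ResetReconstruction without showing it; a sketch along the lines of the official Kinect Fusion sample, where minDepthClip and voxelsPerMeter are assumed fields:

        private void ResetReconstruction()
        {
            // Restart tracking at the world origin.
            this.worldToCameraTransform = Matrix4.Identity;

            if (null != this.volume)
            {
                if (this.translateResetPoseByMinDepthThreshold)
                {
                    // Shift the volume along Z so integration starts at the minimum
                    // depth threshold instead of at the camera itself.
                    Matrix4 worldToVolumeTransform = this.defaultWorldToVolumeTransform;
                    worldToVolumeTransform.M43 -= this.minDepthClip * this.voxelsPerMeter;
                    this.volume.ResetReconstruction(this.worldToCameraTransform, worldToVolumeTransform);
                }
                else
                {
                    this.volume.ResetReconstruction(this.worldToCameraTransform);
                }
            }
        }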
Example #11
        /// <summary>
        /// Execute startup tasks
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void WindowLoaded(object sender, RoutedEventArgs e)
        {
            // Look through all sensors and start the first connected one.
            // This requires that a Kinect is connected at the time of app startup.
            // To make your app robust against plug/unplug,
            // it is recommended to use KinectSensorChooser provided in Microsoft.Kinect.Toolkit
            foreach (var potentialSensor in KinectSensor.KinectSensors)
            {
                if (potentialSensor.Status == KinectStatus.Connected)
                {
                    this.sensor = potentialSensor;
                    break;
                }
            }

            if (null == this.sensor)
            {
                this.statusBarText.Text = Properties.Resources.NoKinectReady;
                return;
            }

            // Turn on the depth stream to receive depth frames
            this.sensor.DepthStream.Enable(DepthImageResolution);

            this.frameDataLength = this.sensor.DepthStream.FramePixelDataLength;

            // Allocate space to put the color pixels we'll create
            this.colorPixels = new int[this.frameDataLength];

            // This is the bitmap we'll display on-screen
            this.colorBitmap = new WriteableBitmap(
                (int)ImageSize.Width,
                (int)ImageSize.Height,
                96.0,
                96.0,
                PixelFormats.Bgr32,
                null);

            // Set the image we display to point to the bitmap where we'll put the image data
            this.Image.Source = this.colorBitmap;

            // Add an event handler to be called whenever there is new depth frame data
            this.sensor.DepthFrameReady += this.SensorDepthFrameReady;

            var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);

            // Set the world-view transform to identity, so the world origin is the initial camera location.
            this.worldToCameraTransform = Matrix4.Identity;

            try
            {
                // This creates a volume cube with the Kinect at center of near plane, and volume directly
                // in front of Kinect.
                this.volume = Reconstruction.FusionCreateReconstruction(volParam, ProcessorType, DeviceToUse, this.worldToCameraTransform);

                this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

                if (this.translateResetPoseByMinDepthThreshold)
                {
                    this.ResetReconstruction();
                }
            }
            catch (InvalidOperationException ex)
            {
                this.statusBarText.Text = ex.Message;
                return;
            }
            catch (DllNotFoundException)
            {
                this.statusBarText.Text = Properties.Resources.MissingPrerequisite;
                return;
            }

            // Depth frames generated from the depth input
            this.depthFloatBuffer = new FusionFloatImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

            // Point cloud frames generated from the depth float input
            this.pointCloudBuffer = new FusionPointCloudImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

            // Create images to raycast the Reconstruction Volume
            this.shadedSurfaceColorFrame = new FusionColorImageFrame((int)ImageSize.Width, (int)ImageSize.Height);

            // Start the sensor!
            try
            {
                this.sensor.Start();
            }
            catch (IOException ex)
            {
                // Device is in use
                this.sensor             = null;
                this.statusBarText.Text = ex.Message;

                return;
            }
            catch (InvalidOperationException ex)
            {
                // Device is not valid, not supported or hardware feature unavailable
                this.sensor             = null;
                this.statusBarText.Text = ex.Message;

                return;
            }

            // Set Near Mode by default
            try
            {
                this.sensor.DepthStream.Range = DepthRange.Near;
                checkBoxNearMode.IsChecked    = true;
            }
            catch (InvalidOperationException)
            {
                // Near mode not supported on this device; silently fail
            }

            // Initialize and start the FPS timer
            this.fpsTimer          = new DispatcherTimer();
            this.fpsTimer.Tick    += new EventHandler(this.FpsTimerTick);
            this.fpsTimer.Interval = new TimeSpan(0, 0, FpsInterval);

            this.fpsTimer.Start();

            // Reset the reconstruction
            this.ResetReconstruction();
        }
Example #12
 public string Reconstruction(Reconstruction recons)
 {
     return(Table[_i].Recons.Get(recons));
 }
        private void InitFusion()
        {
            if (_isFusionInitialized)
            {
                return;
            }

            _isFusionInitialized = true;

            var depthFormat = KinectSensor.DepthStream.Format;
            var depthSize   = FormatHelper.GetDepthSize(depthFormat);

            _fusionWorkItemPool = new Pool<FusionWorkItem, DepthImageFormat>(5, depthFormat, FusionWorkItem.Create);

            _fusionWorkQueue = new WorkQueue <FusionWorkItem>(ProcessFusionFrameBackground)
            {
                CanceledCallback = ReturnFusionWorkItem,
                MaxQueueLength   = 2
            };

            this.frameDataLength = KinectSensor.DepthStream.FramePixelDataLength;

            // Allocate space to put the color pixels we'll create
            this.colorPixels = new int[(int)(depthSize.Width * 2 * depthSize.Height * 2)];

            // This is the bitmap we'll display on-screen
            this.colorFusionBitmap = new WriteableBitmap(
                (int)depthSize.Width * 2,
                (int)depthSize.Height * 2,
                96.0,
                96.0,
                PixelFormats.Bgr32,
                null);
            FusionOutputImage = colorFusionBitmap;

            var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);

            // Set the world-view transform to identity, so the world origin is the initial camera location.
            this.worldToCameraTransform = Matrix4.Identity;

            try
            {
                // This creates a volume cube with the Kinect at center of near plane, and volume directly
                // in front of Kinect.
                this.volume = Reconstruction.FusionCreateReconstruction(volParam, ProcessorType, DeviceToUse, this.worldToCameraTransform);

                this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

                if (this.translateResetPose)
                {
                    this.ResetReconstruction(_currentVolumeCenter);
                }
            }
            catch (ArgumentException)
            {
                FusionStatusMessage = "ArgumentException - DX11 GPU not found?";
                return;
            }
            catch (InvalidOperationException ex)
            {
                FusionStatusMessage = ex.Message;
                return;
            }
            catch (DllNotFoundException)
            {
                FusionStatusMessage = Properties.Resources.MissingPrerequisite;
                return;
            }

            // Depth frames generated from the depth input
            this.depthFloatBuffer    = new FusionFloatImageFrame((int)depthSize.Width, (int)depthSize.Height);
            this.residualFloatBuffer = new FusionFloatImageFrame((int)depthSize.Width, (int)depthSize.Height);
            _residualData            = new float[(int)(depthSize.Width * depthSize.Height)];

            // Point cloud frames generated from the depth float input
            this.pointCloudBuffer = new FusionPointCloudImageFrame((int)depthSize.Width * 2, (int)depthSize.Height * 2);

            // Create images to raycast the Reconstruction Volume
            this.shadedSurfaceColorFrame = new FusionColorImageFrame((int)depthSize.Width * 2, (int)depthSize.Height * 2);

            // Reset the reconstruction
            this.ResetReconstruction(_currentVolumeCenter);

            _audioManager.Start();
        }
Example #14
 internal void Deinitialize()
 {
     this.mReconstruction = null;
     this.mHasInitialized = false;
 }
Example #15
        public void Evaluate(int SpreadMax)
        {
            this.VoxelResolutionX = this.FInVX[0];
            this.VoxelResolutionY = this.FInVY[0];
            this.VoxelResolutionZ = this.FInVZ[0];
            this.VoxelsPerMeter = this.FInVPM[0];

            if (this.FTextureOutput[0] == null) { this.FTextureOutput[0] = new DX11Resource<DX11DynamicTexture2D>(); }
            if (this.FPCOut[0] == null) { this.FPCOut[0] = new DX11Resource<IDX11ReadableStructureBuffer>(); }
            if (this.FGeomOut[0] == null) { this.FGeomOut[0] = new DX11Resource<DX11IndexedGeometry>(); }

            if (this.FOutVoxels[0] == null) { this.FOutVoxels[0] = new DX11Resource<IDX11ReadableStructureBuffer>(); }

            if (this.FInExport[0]) { this.FGeomOut[0].Dispose(); this.FGeomOut[0] = new DX11Resource<DX11IndexedGeometry>(); }

            if (this.FInvalidateConnect)
            {
                this.FInvalidateConnect = false;

                if (this.FInRuntime.PluginIO.IsConnected)
                {
                    this.runtime = this.FInRuntime[0];
                    this.runtime.DepthFrameReady += this.runtime_DepthFrameReady;

                    // Depth frames generated from the depth input
                    this.depthFloatBuffer = new FusionFloatImageFrame(width, height);

                    // Point cloud frames generated from the depth float input
                    this.pointCloudBuffer = new FusionPointCloudImageFrame(width, height);

                    // Create images to raycast the Reconstruction Volume
                    this.shadedSurfaceColorFrame = new FusionColorImageFrame(width, height);

                }
            }

            if (this.FInVPM.IsChanged || this.FInVX.IsChanged || this.FInVY.IsChanged || this.FInVZ.IsChanged)
            {
                if (this.volume != null)
                {
                    this.volume.Dispose();
                }

                var volParam = new ReconstructionParameters(VoxelsPerMeter, VoxelResolutionX, VoxelResolutionY, VoxelResolutionZ);
                this.worldToCameraTransform = Matrix4.Identity;

                this.volume = Reconstruction.FusionCreateReconstruction(volParam, ProcessorType, 0, this.worldToCameraTransform);

                this.defaultWorldToVolumeTransform = this.volume.GetCurrentWorldToVolumeTransform();

                this.ResetReconstruction();
            }

            if (this.runtime != null)
            {
                bool needreset = this.FInReset[0];

                if (needreset) { this.ResetReconstruction(); }
            }
        }
Example #16
        public override bool DestroyReconstruction(Reconstruction reconstruction)
        {
            ReconstructionImpl reconstructionImpl = reconstruction as ReconstructionImpl;

            return(reconstructionImpl != null && VuforiaWrapper.Instance.SmartTerrainBuilderRemoveReconstruction(reconstructionImpl.NativePtr) == 1);
        }
Example #17
 public abstract bool DestroyReconstruction(Reconstruction reconstruction);
Example #18
 public string Reconstruction(Reconstruction recons)
 {
     return Table[_i].Recons.Get(recons);
 }