public void StreamColorDepth(String scanType) /* Stream Color and Depth Synchronously or Asynchronously */
        {
            bool sts = true;

            PXCM3DScan.Configuration scan_config = new PXCM3DScan.Configuration();
            String statusString;

            /* Create an instance of the PXCSenseManager interface */
            PXCMSenseManager pp = PXCMSenseManager.CreateInstance();

            if (pp == null)
            {
                form.UpdateStatus("Failed to create sense manager");
                return;
            }
            if (pp.captureManager == null)
            {
                form.UpdateStatus("Capture manager does not exist");
                return;
            }
            if (!form.IsModeLive())
            {
                pp.captureManager.SetFileName(form.GetFileName(), form.IsModeRecord());
            }

            /* Set Input Source */
            PXCMCapture.DeviceInfo dinfo2 = form.GetCheckedDevice();
            if (form.IsModeLive() || form.IsModeRecord())
            {
                pp.captureManager.FilterByDeviceInfo(dinfo2);
            }

            if (form.IsModeRecord())
            {
                // Delay recording frames until the scan starts
                pp.captureManager.SetPause(true);
            }
            else if (!form.IsModeLive())
            {
                // Disable real-time mode if we are playing back a file
                // to ensure that frames are not skipped.
                pp.captureManager.SetRealtime(false);
            }

            /* Set Color & Depth Resolution */
            PXCMCapture.Device.StreamProfile cinfo = form.GetColorConfiguration();
            if (cinfo.imageInfo.format != 0)
            {
                Single cfps = cinfo.frameRate.max;
                pp.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, cinfo.imageInfo.width, cinfo.imageInfo.height, cfps);
            }

            PXCMCapture.Device.StreamProfile dinfo = form.GetDepthConfiguration();
            if (dinfo.imageInfo.format != 0)
            {
                Single dfps = dinfo.frameRate.max;
                pp.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_DEPTH, dinfo.imageInfo.width, dinfo.imageInfo.height, dfps);
            }

            /* Initialization */
            FPSTimer timer = new FPSTimer(form);

            if (form.IsModeLive())
            {
                form.UpdateStatus("Initializing...");
            }
            /* Enable the 3D Scan video module */
            pxcmStatus result = pp.Enable3DScan();

            if (result != pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                pp.Close();
                pp.Dispose();
                form.UpdateStatus("Enable3DScan() returned " + result);
                return;
            }

            /* Initialize the camera system */
            result = pp.Init();
            form.UpdateStatus("");
            device = pp.captureManager.device;
            if (result >= pxcmStatus.PXCM_STATUS_NO_ERROR && device != null)
            {
                bool bAutoExpAndWBChanged     = false;
                bool bAutoExposureEnabled     = true;
                bool bAutoWhiteBalanceEnabled = true;

                /* Setup the scanning configuration */
                if (scanType == "Object")
                {
                    scan_config.mode = PXCM3DScan.ScanningMode.OBJECT_ON_PLANAR_SURFACE_DETECTION;
                }
                else if (scanType == "Face")
                {
                    scan_config.mode = PXCM3DScan.ScanningMode.FACE;
                }
                else if (scanType == "Body")
                {
                    scan_config.mode = PXCM3DScan.ScanningMode.BODY;
                }
                else if (scanType == "Head")
                {
                    scan_config.mode = PXCM3DScan.ScanningMode.HEAD;
                }
                else if (scanType == "Full")
                {
                    scan_config.mode = PXCM3DScan.ScanningMode.VARIABLE;
                }

                /* Select the Targeting Options */
                scan_config.options = PXCM3DScan.ReconstructionOption.NONE;
                if (form.isSolidificationSelected())
                {
                    scan_config.options
                        |= (PXCM3DScan.ReconstructionOption.SOLIDIFICATION);
                }
                if (form.isTextureSelected())
                {
                    scan_config.options
                        |= (PXCM3DScan.ReconstructionOption.TEXTURE);
                }
                if (form.isLandmarksSelected())
                {
                    scan_config.options
                        |= (PXCM3DScan.ReconstructionOption.LANDMARKS);
                }
                //scan_config.useMarker = form.isUseMarkerChecked();

                scan_config.flopPreviewImage = form.isFlopPreviewImageSelected();

                /* Try to initialize the scanning system */
                PXCM3DScan scan = pp.Query3DScan();
                sts = false;
                if (scan == null)
                {
                    form.UpdateStatus("3DScan module not found.");
                }
                else
                {
                    result = scan.SetConfiguration(scan_config);
                    if (result < pxcmStatus.PXCM_STATUS_NO_ERROR)
                    {
                        scan.Dispose();

                        // Show the configuration-related error code
                        switch (result)
                        {
                        case pxcmStatus.PXCM_STATUS_FEATURE_UNSUPPORTED:
                            form.UpdateStatus("Configuration not supported.");
                            break;

                        case pxcmStatus.PXCM_STATUS_ITEM_UNAVAILABLE:
                            form.UpdateStatus("Face module not found.");
                            break;

                        default:
                            form.UpdateStatus("SetConfiguration returned an error.");
                            break;
                        }
                    }
                    else
                    {
                        sts = true;
                    }
                }

                // Conditionally finish the initialization and enter the main loop
                if (sts == true)
                {
                    // Subscribe to receive range and tracking alerts
                    scan.Subscribe(OnAlert);

                    Projection projection = new Projection(
                        pp.session, device, dinfo.imageInfo);

                    Boolean bScanning        = false;
                    Boolean bPlaybackStarted = false;
                    form.Invoke(new Action(() => form.SetButtonState(sample3dscan.cs.MainForm.ButtonState.Ce_SSd)));

                    while (form.reconstruct_requested || !form.GetStopState())
                    {
                        if (form.GetScanRequested()) /* one time latch */
                        {
                            form.Invoke(new Action(() => form.SetScanRequested(false)));

                            // Delay recording frames until the start of the scan is requested
                            if (form.IsModeRecord())
                            {
                                pp.captureManager.SetPause(false);
                            }

                            // If the max tri/vert controls are enabled,
                            // use the set values. Otherwise, disable decimation
                            // by setting the values to zero.

                            /*
                             * scan_config.maxTriangles = form.getMaxTrianglesEnabledChecked()
                             *  ? form.getMaxTriangles() : 0;
                             * scan_config.maxVertices = form.getMaxVerticesEnabledChecked()
                             *  ? form.getMaxVertices() : 0;
                             */
                            // Request that the scan starts as soon as possible
                            scan_config.startScan = true;
                            scan.SetConfiguration(scan_config);

                            /* Update the status bar to help users understand what the detector is looking for */
                            if (form.IsModeLive())
                            {
                                if (scan_config.mode == PXCM3DScan.ScanningMode.OBJECT_ON_PLANAR_SURFACE_DETECTION)
                                {
                                    form.UpdateStatus("Object not detected. Place object on flat surface in center of view.");
                                }
                            }
                        }
                        else if (form.reconstruct_requested)
                        {
                            sts = SaveMesh(scan);
                        }

                        /* Get preview image from the 3D Scan video module */
                        if (!form.GetStopState())
                        {
                            /* Wait until a frame is ready: Synchronized or Asynchronous */
                            if (pp.AcquireFrame() < pxcmStatus.PXCM_STATUS_NO_ERROR)
                            {
                                projection.Dispose();
                                if (!form.IsModeLive())
                                {
                                    form.Invoke(new Action(() => form.EndScan()));
                                    sts = SaveMesh(scan);
                                }
                                break;
                            }

                            /* Get preview image from the 3D Scan video module */
                            PXCMImage preview_image = scan.AcquirePreviewImage();
                            pp.ReleaseFrame();

                            /* Display Image and Status */
                            if (preview_image != null)
                            {
                                form.SetBitmap(preview_image);

                                if (scan.IsScanning())
                                {
                                    statusString = "Scanning";
                                    timer.Tick(statusString + " ");
                                    if (bScanning == false) // Lazy initializer
                                    {
                                        bScanning = true;   // One way latch

                                        // Once the scanning process starts, we want to enable the Reconstruct button
                                        form.Invoke(new Action(() => form.SetButtonState(sample3dscan.cs.MainForm.ButtonState.Ce_ESe)));

                                        // Object, head and body scanning with a rear-facing camera involves walking
                                        // around the target, which effectively exposes the camera to the full
                                        // environment, similar to a panorama. To avoid undesirable color
                                        // inconsistencies (related to the response of the auto-exposure/white-balance changes),
                                        // it usually works best to disable them.
                                        // Note that these property changes are restored (below).
                                        if (device.deviceInfo.orientation
                                            == PXCMCapture.DeviceOrientation.DEVICE_ORIENTATION_REAR_FACING &&
                                            scan_config.mode != PXCM3DScan.ScanningMode.FACE &&
                                            form.IsModeLive())
                                        {
                                            bAutoExpAndWBChanged = true;
                                            bAutoExposureEnabled = device.QueryColorAutoExposure();
                                            device.SetColorAutoExposure(false);
                                            bAutoWhiteBalanceEnabled = device.QueryColorAutoWhiteBalance();
                                            device.SetColorAutoWhiteBalance(false);
                                        }
                                    }
                                }
                                else
                                {
                                    if (!form.IsModeLive() && !form.IsModeRecord()) // In playback mode, automatically request the scan
                                    {
                                        if (bPlaybackStarted == false)              // Lazy initializer
                                        {
                                            bPlaybackStarted    = true;             // One way latch
                                            form.scan_requested = true;
                                            form.Invoke(new Action(() => form.StartScanning(false)));
                                            form.Invoke(new Action(() => form.SetButtonState(sample3dscan.cs.MainForm.ButtonState.Ce_ESe)));
                                        }
                                    }
                                    else
                                    {
                                        if (!form.GetStopState())
                                        {
                                            if (isScanReady(form.landmarksChecked()))
                                            {
                                                form.Invoke(new Action(() => form.EnableReconstruction(true)));
                                            }
                                            else
                                            {
                                                form.Invoke(new Action(() => form.EnableReconstruction(false)));
                                            }
                                        }
                                    }
                                }
                                preview_image.Dispose();
                            }
                        }
                    }
                    projection.Dispose();
                    scan.Dispose();
                }
                // Restore the default camera properties
                if (bAutoExpAndWBChanged)
                {
                    device.SetColorAutoExposure(bAutoExposureEnabled);
                    device.SetColorAutoWhiteBalance(bAutoWhiteBalanceEnabled);
                }

                device.Dispose();
                device = null;
            }
            else
            {
                try { form.UpdateStatus(result + ""); }
                catch { }
                sts = false;
            }

            if (sts)
            {
                try { form.UpdateStatus(""); }
                catch { }
            }

            pp.Close();
            pp.Dispose();

            try { form.Invoke(new Action(() => form.ResetStop())); }
            catch { }
        }
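
StreamColorDepth subscribes a range/tracking alert handler via scan.Subscribe(OnAlert), but the handler itself is not part of this listing. A minimal sketch follows; it assumes the 3D Scan alert callback receives a PXCM3DScan.AlertData argument whose label field identifies the alert, and simply forwards that label to the status bar.

        /* Minimal sketch (not part of the original listing): forward 3D Scan range/tracking
         * alerts to the status bar. The AlertData/label signature is assumed to match the
         * SDK's 3D Scan alert callback. */
        private void OnAlert(PXCM3DScan.AlertData data)
        {
            try { form.UpdateStatus(data.label.ToString()); }
            catch { }
        }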
Example No. 2
        private void AcquireThread()
        {
            // Stream data
            while (senseManager.AcquireFrame(true) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                if (click == true)
                {
                    Thread.Sleep(500);
                    click = false;
                }
                // Retrieve the results
                PXCM3DSeg segmentation = senseManager.Query3DSeg();

                if (segmentation != null)
                {
                    // Get the segmented image
                    PXCMImage segmentedImage = segmentation.AcquireSegmentedImage();

                    if (segmentedImage != null)
                    {
                        // Access the segmented image data
                        PXCMImage.ImageData segmentedImageData;
                        segmentedImage.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB32, out segmentedImageData);

                        // Lock the backdrop image bitmap bits into system memory and access its data
                        // (Reference: https://msdn.microsoft.com/en-us/library/5ey6h79d%28v=vs.110%29.aspx)
                        // (Reference: http://csharpexamples.com/fast-image-processing-c/)
                        Rectangle  imageRect          = new Rectangle(0, 0, WIDTH, HEIGHT);
                        BitmapData backdropBitmapData = backdrop.LockBits(imageRect, ImageLockMode.ReadWrite, backdrop.PixelFormat);
                        int        bytesPerPixel      = Bitmap.GetPixelFormatSize(backdropBitmapData.PixelFormat) / 8;
                        int        widthInBytes       = WIDTH * bytesPerPixel;

                        for (int h = 0; h < HEIGHT; h++)
                        {
                            // Use unsafe keyword to work with pointers for faster image processing
                            // (Required setting: Project -> Properties -> Build -> Allow unsafe code)
                            unsafe
                            {
                                byte *segmentedImagePixel = (byte *)segmentedImageData.planes[0] + h * segmentedImageData.pitches[0];

                                for (int w = 0; w < widthInBytes; w = w + bytesPerPixel)
                                {
                                    byte *backdropPixel = (byte *)backdropBitmapData.Scan0 + (h * backdropBitmapData.Stride);

                                    // Substitute segmented background pixels (those containing an alpha channel of zero)
                                    // with pixels from the selected backdrop image, if the checkbox is selected
                                    if ((segmentedImagePixel[3] <= 0))
                                    {
                                        segmentedImagePixel[0] = backdropPixel[w];
                                        segmentedImagePixel[1] = backdropPixel[w + 1];
                                        segmentedImagePixel[2] = backdropPixel[w + 2];
                                    }

                                    segmentedImagePixel += 4;
                                }
                            }
                        }

                        // Unlock the backdrop image bitmap bits
                        backdrop.UnlockBits(backdropBitmapData);

                        // Export the image data to a bitmap
                        Bitmap bitmap = segmentedImageData.ToBitmap(0, segmentedImage.info.width, segmentedImage.info.height);

                        // Update the UI by delegating work to the Dispatcher associated with the UI thread
                        this.Dispatcher.Invoke(System.Windows.Threading.DispatcherPriority.Normal, new Action(delegate()
                        {
                            imgBackdrop.Source = ImageUtils.ConvertBitmapToWpf(bitmap);
                        }));

                        // Optionally save a snapshot of the image (captureSnapshot is set in the Capture button's event handler)
                        if (captureSnapshot)
                        {
                            bitmap.Save(path + "MyPic.jpg", System.Drawing.Imaging.ImageFormat.Jpeg);
                            captureSnapshot = false;
                        }

                        segmentedImage.ReleaseAccess(segmentedImageData);
                        segmentedImage.Dispose();
                        segmentation.Dispose();
                        bitmap.Dispose();
                    }
                }

                // Resume next frame processing
                senseManager.ReleaseFrame();
            }
        }
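
AcquireThread assumes that senseManager has already been created, configured for color streaming and 3D segmentation, and initialized. A hedged setup sketch is shown below; Enable3DSeg() is assumed to mirror the Enable3DScan() pattern in the first example, the 30 fps value is an assumption, and WIDTH/HEIGHT are the same constants used in the loop above.

        /* Hedged setup sketch (not from the original listing): configure the pipeline that
         * AcquireThread consumes and run the acquisition loop on a background thread. */
        private void StartSegmentation()
        {
            senseManager = PXCMSenseManager.CreateInstance();
            senseManager.EnableStream(PXCMCapture.StreamType.STREAM_TYPE_COLOR, WIDTH, HEIGHT, 30);
            senseManager.Enable3DSeg();   // assumed to mirror Enable3DScan() from the first example
            if (senseManager.Init() >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                System.Threading.Thread worker = new System.Threading.Thread(AcquireThread);
                worker.IsBackground = true;   // let the thread end with the application
                worker.Start();
            }
        }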
Example No. 3
        public void AdvancedPipeline()
        {
            PXCMSession session;
            pxcmStatus  sts = PXCMSession.CreateInstance(out session);

            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                form.UpdateStatus("Failed to create an SDK session");
                return;
            }

            /* Set Module */
            PXCMSession.ImplDesc desc = new PXCMSession.ImplDesc();
            desc.friendlyName.set(form.GetCheckedModule());

            PXCMEmotion emotionDet;

            sts = session.CreateImpl <PXCMEmotion>(ref desc, PXCMEmotion.CUID, out emotionDet);
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                form.UpdateStatus("Failed to create the emotionDet module");
                session.Dispose();
                return;
            }

            UtilMCapture capture = null;

            if (form.GetRecordState())
            {
                capture = new UtilMCaptureFile(session, form.GetFileName(), true);
                capture.SetFilter(form.GetCheckedDevice());
            }
            else if (form.GetPlaybackState())
            {
                capture = new UtilMCaptureFile(session, form.GetFileName(), false);
            }
            else
            {
                capture = new UtilMCapture(session);
                capture.SetFilter(form.GetCheckedDevice());
            }

            form.UpdateStatus("Pair moudle with I/O");
            for (uint i = 0; ; i++)
            {
                PXCMEmotion.ProfileInfo pinfo;
                sts = emotionDet.QueryProfile(i, out pinfo);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }
                sts = capture.LocateStreams(ref pinfo.inputs);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    continue;
                }
                sts = emotionDet.SetProfile(ref pinfo);
                if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }
            }
            if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                form.UpdateStatus("Failed to pair the emotionDet module with I/O");
                capture.Dispose();
                emotionDet.Dispose();
                session.Dispose();
                return;
            }

            Camera.shouldConfigure = true;
            form.UpdateStatus("Calibrating");
            PXCMImage[] images            = new PXCMImage[PXCMCapture.VideoStream.STREAM_LIMIT];
            PXCMScheduler.SyncPoint[] sps = new PXCMScheduler.SyncPoint[2];
            while (!form.stop)
            {
                PXCMImage.Dispose(images);
                PXCMScheduler.SyncPoint.Dispose(sps);
                sts = capture.ReadStreamAsync(images, out sps[0]);
                if (DisplayDeviceConnection(sts == pxcmStatus.PXCM_STATUS_DEVICE_LOST))
                {
                    continue;
                }
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                sts = emotionDet.ProcessImageAsync(images, out sps[1]);
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                PXCMScheduler.SyncPoint.SynchronizeEx(sps);
                sts = sps[0].Synchronize();
                if (DisplayDeviceConnection(sts == pxcmStatus.PXCM_STATUS_DEVICE_LOST))
                {
                    continue;
                }
                if (sts < pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    break;
                }

                /* Display Results */
                DisplayPicture(capture.QueryImage(images, PXCMImage.ImageType.IMAGE_TYPE_COLOR));
                DisplayLocation(emotionDet);
                form.UpdatePanel();
            }
            PXCMImage.Dispose(images);
            PXCMScheduler.SyncPoint.Dispose(sps);
            Camera.x = Camera.stopX;
            Camera.y = Camera.stopY;
            capture.Dispose();
            emotionDet.Dispose();
            session.Dispose();
            form.UpdateStatus("Stopped");
        }
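
DisplayDeviceConnection is called with a flag that is true when a read or synchronize returns PXCM_STATUS_DEVICE_LOST, and the loop continues while the helper returns true. A plausible implementation, sketched here as an assumption rather than the original helper, latches the disconnect state and reports only on transitions.

        private bool disconnected = false;   // assumed backing field

        /* Hedged sketch of the helper used above: report connect/disconnect transitions
         * and tell the caller whether the device is currently lost. */
        private bool DisplayDeviceConnection(bool deviceLost)
        {
            if (deviceLost)
            {
                if (!disconnected) form.UpdateStatus("Device Disconnected");
                disconnected = true;
            }
            else
            {
                if (disconnected) form.UpdateStatus("Device Reconnected");
                disconnected = false;
            }
            return disconnected;
        }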
Example No. 4
        void OnDisable()
        {
            // Dispose all modules

            Initialized = false;
            if (SenseManager == null)
            {
                return;
            }

            DisposeFunctions.ForEach(i => i.DynamicInvoke());

            if (FaceModuleOutput != null)
            {
                FaceModuleOutput.Dispose();
                FaceModuleOutput = null;
            }
            if (HandDataOutput != null)
            {
                SenseManager.PauseHand(true);
                HandDataOutput.Dispose();
                HandDataOutput = null;
            }
            if (BlobDataOutput != null)
            {
                SenseManager.PauseBlob(true);
                BlobDataOutput.Dispose();
                BlobDataOutput = null;
            }
            if (ImageRgbOutput != null)
            {
                ImageRgbOutput.Dispose();
                ImageRgbOutput = null;
            }
            if (ImageDepthOutput != null)
            {
                ImageDepthOutput.Dispose();
                ImageDepthOutput = null;
            }

            if (ImageIROutput != null)
            {
                ImageIROutput.Dispose();
                ImageIROutput = null;
            }

            if (Image3DSegmentationOutput != null)
            {
                Image3DSegmentationOutput.Dispose();
                Image3DSegmentationOutput = null;
            }

            if (Projection != null)
            {
                Projection.Dispose();
                Projection = null;
            }

            /* GZ
             *          if (BlobExtractor != null)
             *          {
             *                  BlobExtractor.Dispose();
             *                  BlobExtractor = null;
             *          } */

            UvMap = null;

            PointCloud = null;

            SenseManager.Dispose();
            SenseManager = null;
        }
Example No. 5
        public override bool Process(Trigger trigger)
        {
            trigger.ErrorDetected = false;
            if (!SenseToolkitManager.Instance.IsSenseOptionSet(SenseOption.SenseOptionID.VideoDepthStream))
            {
                trigger.ErrorDetected = true;
                Debug.LogError("Blob Analysis Module Not Set");
                return(false);
            }

            if (!(trigger is TrackTrigger))
            {
                trigger.ErrorDetected = true;
                return(false);
            }

            bool success = false;

            // make sure we have valid values

            if (RealWorldBoxDimensions.x <= 0)
            {
                RealWorldBoxDimensions.x = 1;
            }

            if (RealWorldBoxDimensions.y <= 0)
            {
                RealWorldBoxDimensions.y = 1;
            }

            if (RealWorldBoxDimensions.z <= 0)
            {
                RealWorldBoxDimensions.z = 1;
            }


            if (SenseToolkitManager.Instance != null && SenseToolkitManager.Instance.Initialized && SenseToolkitManager.Instance.BlobExtractor != null && SenseToolkitManager.Instance.ImageDepthOutput != null)
            {
                // Set the max distance for this rule and process the image
                PXCMBlobExtractor.BlobData _blobData = new PXCMBlobExtractor.BlobData();

                SenseToolkitManager.Instance.BlobExtractor.SetMaxDistance(MaxDistance * 10);

                var sts = SenseToolkitManager.Instance.BlobExtractor.ProcessImage(SenseToolkitManager.Instance.ImageDepthOutput);


                if (sts >= pxcmStatus.PXCM_STATUS_NO_ERROR && SenseToolkitManager.Instance.BlobExtractor.QueryNumberOfBlobs() > 0)
                {
                    if (BlobIndex >= SenseToolkitManager.Instance.BlobExtractor.QueryNumberOfBlobs())
                    {
                        return(false);
                    }

                    PXCMImage.ImageInfo info = SenseToolkitManager.Instance.ImageDepthOutput.QueryInfo();
                    info.format = PXCMImage.PixelFormat.PIXEL_FORMAT_Y8;
                    PXCMImage new_image = SenseToolkitManager.Instance.SenseManager.session.CreateImage(info);

                    // Process Tracking

                    SenseToolkitManager.Instance.BlobExtractor.QueryBlobData(BlobIndex, new_image, out _blobData);

                    new_image.Dispose();

                    TrackTrigger specificTrigger = (TrackTrigger)trigger;

                    PXCMPointI32 trackedPoint = BlobUtilityClass.GetTrackedPoint(_blobData, BlobPointToTrack);

                    PXCMImage.ImageData data;
                    SenseToolkitManager.Instance.ImageDepthOutput.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_F32, out data);

                    if (_depthArray == null)
                    {
                        _depthArray = new float[SenseToolkitManager.Instance.ImageDepthOutput.info.width * SenseToolkitManager.Instance.ImageDepthOutput.info.height];
                    }
                    data.ToFloatArray(0, _depthArray);

                    float depth = _depthArray[(int)trackedPoint.x + (int)trackedPoint.y * SenseToolkitManager.Instance.ImageDepthOutput.info.width];

                    if (_pos_uvz == null)
                    {
                        _pos_uvz = new PXCMPoint3DF32[1] {
                            new PXCMPoint3DF32()
                        };
                    }
                    _pos_uvz[0].x = trackedPoint.x;
                    _pos_uvz[0].y = trackedPoint.y;
                    _pos_uvz[0].z = depth;

                    if (_pos3d == null)
                    {
                        _pos3d = new PXCMPoint3DF32[1] {
                            new PXCMPoint3DF32()
                        };
                    }

                    SenseToolkitManager.Instance.Projection.ProjectDepthToCamera(_pos_uvz, _pos3d);

                    Vector3 position = new Vector3();
                    position.x = -_pos3d[0].x / 10;
                    position.y = _pos3d[0].y / 10;
                    position.z = _pos3d[0].z / 10;

                    SenseToolkitManager.Instance.ImageDepthOutput.ReleaseAccess(data);

                    TrackingUtilityClass.ClampToRealWorldInputBox(ref position, RealWorldBoxCenter, RealWorldBoxDimensions);
                    TrackingUtilityClass.Normalize(ref position, RealWorldBoxCenter, RealWorldBoxDimensions);

                    if (!float.IsNaN(position.x) && !float.IsNaN(position.y) && !float.IsNaN(position.z))
                    {
                        specificTrigger.Position = position;
                    }
                    else
                    {
                        return(false);
                    }
                    success = true;
                }
            }
            else
            {
                return(false);
            }


            return(success);
        }
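
The position math above converts the camera-space point returned by ProjectDepthToCamera into the toolkit's coordinate space by dividing each component by 10 and negating x. A small helper that makes the conversion explicit, written here purely for illustration:

        // Illustrative helper (not in the original): ProjectDepthToCamera returns camera-space
        // coordinates that appear to be in millimetres, so dividing by 10 yields the centimetre
        // units used by the real-world box, and negating x flips to Unity's handedness.
        private static Vector3 ToToolkitSpace(PXCMPoint3DF32 p)
        {
            return new Vector3(-p.x / 10f, p.y / 10f, p.z / 10f);
        }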
Example No. 6
        /* Displaying Depth/Mask Images - for the depth image only, we use a delay of NumberOfFramesToDelay frames to sync the image with tracking */
        private unsafe void DisplayPicture(PXCMImage depth, PXCMHandData handAnalysis)
        {
            if (depth == null)
            {
                return;
            }

            PXCMImage image = depth;

            //Mask Image
            if (form.GetLabelmapState())
            {
                Bitmap labeledBitmap = null;
                try
                {
                    labeledBitmap = new Bitmap(image.info.width, image.info.height, PixelFormat.Format32bppRgb);
                }
                catch (Exception)
                {
                    image.Dispose();
                    return;
                }

                for (int j = 0; j < handAnalysis.QueryNumberOfHands(); j++)
                {
                    int id;
                    PXCMImage.ImageData data;

                    handAnalysis.QueryHandId(PXCMHandData.AccessOrderType.ACCESS_ORDER_BY_TIME, j, out id);
                    //Get hand by time of appearance
                    PXCMHandData.IHand handData;
                    handAnalysis.QueryHandData(PXCMHandData.AccessOrderType.ACCESS_ORDER_BY_TIME, j, out handData);
                    if (handData != null &&
                        (handData.QuerySegmentationImage(out image) >= pxcmStatus.PXCM_STATUS_NO_ERROR))
                    {
                        if (image.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_Y8,
                                                out data) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                        {
                            Rectangle rect = new Rectangle(0, 0, image.info.width, image.info.height);

                            BitmapData bitmapdata = labeledBitmap.LockBits(rect, ImageLockMode.ReadWrite, labeledBitmap.PixelFormat);
                            byte *     numPtr     = (byte *)bitmapdata.Scan0; //dst
                            byte *     numPtr2    = (byte *)data.planes[0];   //row
                            int        imagesize  = image.info.width * image.info.height;
                            byte       num2       = (byte)handData.QueryBodySide();

                            byte tmp = 0;
                            for (int i = 0; i < imagesize; i++, numPtr += 4, numPtr2++)
                            {
                                tmp       = (byte)(LUT[numPtr2[0]] * num2 * 100);
                                numPtr[0] = (Byte)(tmp | numPtr[0]);
                                numPtr[1] = (Byte)(tmp | numPtr[1]);
                                numPtr[2] = (Byte)(tmp | numPtr[2]);
                                numPtr[3] = 0xff;
                            }

                            bool isError = false;

                            try
                            {
                                labeledBitmap.UnlockBits(bitmapdata);
                            }
                            catch (Exception)
                            {
                                isError = true;
                            }
                            try
                            {
                                image.ReleaseAccess(data);
                            }
                            catch (Exception)
                            {
                                isError = true;
                            }

                            if (isError)
                            {
                                labeledBitmap.Dispose();
                                image.Dispose();
                                return;
                            }
                        }
                    }
                }
                if (labeledBitmap != null)
                {
                    form.DisplayBitmap(labeledBitmap);
                    labeledBitmap.Dispose();
                }
                image.Dispose();
            }//end label image

            //Depth Image
            else
            {
                //Collect NumberOfFramesToDelay images in a queue and display the oldest one
                PXCMImage.ImageInfo info;
                PXCMImage           image2;

                info   = image.QueryInfo();
                image2 = form.g_session.CreateImage(info);
                image2.CopyImage(image);
                m_images.Enqueue(image2);
                if (m_images.Count == NumberOfFramesToDelay)
                {
                    Bitmap depthBitmap;
                    try
                    {
                        depthBitmap = new Bitmap(image.info.width, image.info.height, PixelFormat.Format32bppRgb);
                    }
                    catch (Exception)
                    {
                        image.Dispose();
                        PXCMImage queImage = m_images.Dequeue();
                        queImage.Dispose();
                        return;
                    }

                    PXCMImage.ImageData data3;
                    PXCMImage           image3 = m_images.Dequeue();
                    if (image3.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH, out data3) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                    {
                        float fMaxValue = _maxRange;
                        byte  cVal;

                        Rectangle  rect       = new Rectangle(0, 0, image.info.width, image.info.height);
                        BitmapData bitmapdata = depthBitmap.LockBits(rect, ImageLockMode.ReadWrite, depthBitmap.PixelFormat);

                        byte * pDst = (byte *)bitmapdata.Scan0;
                        short *pSrc = (short *)data3.planes[0];
                        int    size = image.info.width * image.info.height;

                        for (int i = 0; i < size; i++, pSrc++, pDst += 4)
                        {
                            cVal = (byte)((*pSrc) / fMaxValue * 255);
                            if (cVal != 0)
                            {
                                cVal = (byte)(255 - cVal);
                            }

                            pDst[0] = cVal;
                            pDst[1] = cVal;
                            pDst[2] = cVal;
                            pDst[3] = 255;
                        }
                        try
                        {
                            depthBitmap.UnlockBits(bitmapdata);
                        }
                        catch (Exception)
                        {
                            image3.ReleaseAccess(data3);
                            depthBitmap.Dispose();
                            image3.Dispose();
                            return;
                        }

                        form.DisplayBitmap(depthBitmap);
                        image3.ReleaseAccess(data3);
                    }
                    depthBitmap.Dispose();
                    image3.Dispose();
                }
            }
        }
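
The shading loop above relies on a LUT array that is not shown in this listing. It is presumably a 256-entry table that maps the Y8 segmentation value 255 (pixels belonging to the hand) to 1 and every other value to 0, so the shading term becomes bodySide * 100 inside the mask and 0 outside it. A sketch of such an initializer, labeled as an assumption rather than the original code:

        private byte[] LUT;   // assumed definition of the table used in DisplayPicture

        private void InitLut()
        {
            // Map only fully labeled (255) segmentation pixels to 1; all other values stay 0.
            LUT = new byte[256];
            LUT[255] = 1;
        }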
Example No. 7
        public byte[] ColorToDepthCoordinatesByInvUVMap(PXCMImage color, PXCMImage depth, int dots, out int cwidth, out int cheight)
        {
            /* Retrieve the color pixels */
            byte[] cpixels = color.GetRGB32Pixels(out cwidth, out cheight);

            if (projection == null || cpixels == null)
            {
                return(cpixels);
            }

            if (dots >= 9)
            { // A sample for CreateDepthImageMappedToColor output visualization
                PXCMImage.ImageData d2cDat;
                PXCMImage           d2c = projection.CreateDepthImageMappedToColor(depth, color);
                if (d2c == null)
                {
                    return(cpixels);
                }

                UInt16[] d2cpixels;
                if (d2c.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH, out d2cDat) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
                {
                    Int32 d2cwidth  = d2cDat.pitches[0] / sizeof(Int16); /* aligned width */
                    Int32 d2cheight = (Int32)d2c.info.height;
                    d2cpixels = d2cDat.ToUShortArray(0, d2cwidth * d2cheight);

                    for (Int32 y = 0; y < cheight; y++)
                    {
                        for (Int32 x = 0; x < cwidth; x++)
                        {
                            if (d2cpixels[y * d2cwidth + x] == invalid_value)
                            {
                                continue;                                               // no mapping based on unreliable depth values
                            }
                            cpixels[(y * cwidth + x) * 4] = 0xFF;
                        }
                    }

                    d2c.ReleaseAccess(d2cDat);
                }
                d2c.Dispose();
                return(cpixels);
            }

            /* Retrieve the depth pixels and uvmap */
            PXCMImage.ImageData ddata;
            Int16[]             dpixels;
            if (depth.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH, out ddata) >= pxcmStatus.PXCM_STATUS_NO_ERROR)
            {
                Int32 dpitch  = ddata.pitches[0] / sizeof(Int16); /* aligned width */
                Int32 dwidth  = (Int32)depth.info.width;
                Int32 dheight = (Int32)depth.info.height;
                dpixels = ddata.ToShortArray(0, dpitch * dheight);
                pxcmStatus sts        = projection.QueryInvUVMap(depth, invuvmap);
                Int32      invuvpitch = color.QueryInfo().width;
                depth.ReleaseAccess(ddata);

                if (dots > 1)
                { // If the depth data is valid, just set a blue pixel
                    /* Draw dots onto the color pixels */
                    for (Int32 y = 0; y < cheight; y++)
                    {
                        for (Int32 x = 0; x < cwidth; x++)
                        {
                            Int32 xx = (Int32)(invuvmap[y * cwidth + x].x * dwidth);
                            Int32 yy = (Int32)(invuvmap[y * cwidth + x].y * dheight);
                            if (xx >= 0 && yy >= 0)
                            {
                                if (dpixels[yy * dpitch + xx] > 0)
                                {
                                    cpixels[(y * cwidth + x) * 4] = 0xFF;
                                }
                            }
                        }
                    }
                }
                else
                { // If the depth data is valid, set a blue pixel whose brightness depends on the depth value
                    Int32   MAX_LOCAL_DEPTH_VALUE = 4000;
                    Int32[] depth_hist            = new Int32[MAX_LOCAL_DEPTH_VALUE];
                    Array.Clear(depth_hist, 0, depth_hist.Length);
                    Int32 num_depth_points = 0;
                    for (Int32 y = 0; y < dheight; y++)
                    {
                        for (Int32 x = 0; x < dwidth; x++)
                        {
                            Int16 d = dpixels[y * dpitch + x];
                            if (d > 0 && d < MAX_LOCAL_DEPTH_VALUE)
                            {
                                depth_hist[d]++;
                                num_depth_points++;
                            }
                        }
                    }

                    if (num_depth_points > 0)
                    {
                        for (Int32 i = 1; i < MAX_LOCAL_DEPTH_VALUE; i++)
                        {
                            depth_hist[i] += depth_hist[i - 1];
                        }
                        for (Int32 i = 1; i < MAX_LOCAL_DEPTH_VALUE; i++)
                        {
                            depth_hist[i] = 255 - (Int32)((float)255 * (float)depth_hist[i] / (float)num_depth_points);
                        }

                        /* Draw dots onto the color pixels */
                        for (Int32 y = 0; y < cheight; y++)
                        {
                            for (Int32 x = 0; x < cwidth; x++)
                            {
                                Int32 xx = (Int32)(invuvmap[y * cwidth + x].x * dwidth);
                                Int32 yy = (Int32)(invuvmap[y * cwidth + x].y * dheight);
                                if (xx >= 0 && yy >= 0)
                                {
                                    Int16 d = dpixels[yy * dpitch + xx];
                                    if (d > 0 && d < MAX_LOCAL_DEPTH_VALUE)
                                    {
                                        cpixels[(y * cwidth + x) * 4] = (byte)depth_hist[d];
                                    }
                                }
                            }
                        }
                    }
                }
            }
            return(cpixels);
        }
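
The else branch above shades each mapped pixel by a cumulative depth histogram: valid depths are counted, the histogram is turned into a cumulative distribution, and each depth maps to 255 minus its cumulative share, so nearer depths render brighter. The same mapping, factored into a standalone helper for clarity (illustrative only):

        // Illustrative refactoring of the shading logic above: build a brightness LUT from a
        // cumulative histogram of the valid depth values (0 < d < maxDepth).
        private static byte[] BuildDepthShadingLut(Int16[] dpixels, Int32 maxDepth)
        {
            Int32[] hist = new Int32[maxDepth];
            Int32 count = 0;
            foreach (Int16 d in dpixels)
            {
                if (d > 0 && d < maxDepth) { hist[d]++; count++; }
            }
            byte[] lut = new byte[maxDepth];
            if (count == 0) return lut;
            Int32 cumulative = 0;
            for (Int32 i = 1; i < maxDepth; i++)
            {
                cumulative += hist[i];
                // Nearer (smaller) depths have a smaller cumulative share and therefore a brighter value.
                lut[i] = (byte)(255 - (Int32)(255f * cumulative / count));
            }
            return lut;
        }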
Example No. 8
        public override bool Process(Trigger trigger)
        {
            trigger.ErrorDetected = false;
            if (!SenseToolkitManager.Instance.IsSenseOptionSet(SenseOption.SenseOptionID.VideoDepthStream))
            {
                trigger.ErrorDetected = true;
                Debug.LogError("Depth Stream Not Set");
                return(false);
            }

            if (!(trigger is EventTrigger))
            {
                trigger.ErrorDetected = true;
                return(false);
            }

            EventTrigger specificTrigger = (EventTrigger)trigger;

            specificTrigger.Source = this.name;

            bool success = false;

            if (SenseToolkitManager.Instance.Initialized && SenseToolkitManager.Instance.BlobExtractor != null && SenseToolkitManager.Instance.ImageDepthOutput != null)
            {
                // Set the max distance for this rule and process the image
                SenseToolkitManager.Instance.BlobExtractor.SetMaxDistance(MaxDistance * 10);

                var sts = SenseToolkitManager.Instance.BlobExtractor.ProcessImage(SenseToolkitManager.Instance.ImageDepthOutput);

                if (sts == pxcmStatus.PXCM_STATUS_NO_ERROR && SenseToolkitManager.Instance.BlobExtractor.QueryNumberOfBlobs() >= NumberOfBlobs)
                {
                    int numberOfBlobsDetected = SenseToolkitManager.Instance.BlobExtractor.QueryNumberOfBlobs();

                    int blobsRightlyDetected = 0;

                    PXCMBlobExtractor.BlobData blobData = new PXCMBlobExtractor.BlobData();

                    // For each detected blob, project the tracked point to the real world and
                    //  check that it is in our real world box
                    for (int i = 0; i < numberOfBlobsDetected; i++)
                    {
                        PXCMImage.ImageInfo info = SenseToolkitManager.Instance.ImageDepthOutput.QueryInfo();
                        info.format = PXCMImage.PixelFormat.PIXEL_FORMAT_Y8;
                        PXCMImage new_image = SenseToolkitManager.Instance.SenseManager.session.CreateImage(info);

                        // Process Tracking
                        SenseToolkitManager.Instance.BlobExtractor.QueryBlobData(i, new_image, out blobData);

                        new_image.Dispose();

                        PXCMPointI32 trackedPoint = BlobUtilityClass.GetTrackedPoint(blobData, BlobPointToTrack);

                        PXCMImage.ImageData data;
                        SenseToolkitManager.Instance.ImageDepthOutput.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_DEPTH_F32, out data);

                        if (_depthArray == null)
                        {
                            _depthArray = new float[SenseToolkitManager.Instance.ImageDepthOutput.info.width * SenseToolkitManager.Instance.ImageDepthOutput.info.height];
                        }
                        data.ToFloatArray(0, _depthArray);

                        float depth = _depthArray[(int)trackedPoint.x + (int)trackedPoint.y * SenseToolkitManager.Instance.ImageDepthOutput.info.width];

                        if (_pos_uvz == null)
                        {
                            _pos_uvz = new PXCMPoint3DF32[1] {
                                new PXCMPoint3DF32()
                            };
                        }
                        _pos_uvz[0].x = trackedPoint.x;
                        _pos_uvz[0].y = trackedPoint.y;
                        _pos_uvz[0].z = depth;

                        if (_pos3d == null)
                        {
                            _pos3d = new PXCMPoint3DF32[1] {
                                new PXCMPoint3DF32()
                            };
                        }

                        SenseToolkitManager.Instance.Projection.ProjectDepthToCamera(_pos_uvz, _pos3d);

                        Vector3 position = new Vector3();
                        position.x = -_pos3d[0].x / 10;
                        position.y = _pos3d[0].y / 10;
                        position.z = _pos3d[0].z / 10;

                        SenseToolkitManager.Instance.ImageDepthOutput.ReleaseAccess(data);

                        TrackingUtilityClass.ClampToRealWorldInputBox(ref position, RealWorldBoxCenter, RealWorldBoxDimensions);
                        TrackingUtilityClass.Normalize(ref position, RealWorldBoxCenter, RealWorldBoxDimensions);

                        if (!float.IsNaN(position.x) && !float.IsNaN(position.y) && !float.IsNaN(position.z))
                        {
                            if (position.x > 0 && position.x < 1 &&
                                position.y > 0 && position.y < 1 &&
                                position.z > 0 && position.z < 1)
                            {
                                blobsRightlyDetected++;

                                if (blobsRightlyDetected == NumberOfBlobs)
                                {
                                    break;
                                }
                            }
                        }
                        else
                        {
                            _lastFrameDetected = false;
                            return(false);
                        }
                    }


                    if (blobsRightlyDetected >= NumberOfBlobs)
                    {
                        if (!_lastFrameDetected)
                        {
                            success = true;
                        }
                        _lastFrameDetected = true;
                    }
                    else
                    {
                        _lastFrameDetected = false;
                    }
                }
                else
                {
                    _lastFrameDetected = false;
                }
            }
            else
            {
                return(false);
            }



            return(success);
        }