public override Frame Process(Frame frame, FrameSource frameSource, FramesReleaser releaser)
 {
     lock (_lock)
     {
         using (var frameset = FrameSet.FromFrame(frame))
         {
             using (var depth = frameset.DepthFrame)
             {
                 using (var texture = frameset.FirstOrDefault<VideoFrame>(_textureStream))
                 {
                     _videoStreamFilter[depth.Profile.Stream].CopyProfile(depth);
                     _videoStreamFilter[texture.Profile.Stream].CopyProfile(texture);
                     if (_currVideoStreamFilter.Count == 0 ||
                         !_currVideoStreamFilter[depth.Profile.Stream].Equals(_videoStreamFilter[depth.Profile.Stream]) ||
                         !_currVideoStreamFilter[texture.Profile.Stream].Equals(_videoStreamFilter[texture.Profile.Stream]))
                     {
                         ResetProcessingBlock();
                         _currVideoStreamFilter[depth.Profile.Stream]   = new RsVideoStreamRequest(depth);
                         _currVideoStreamFilter[texture.Profile.Stream] = new RsVideoStreamRequest(texture);
                     }
                     var points = _pb.Calculate(depth, releaser);
                     _pb.MapTexture(texture);
                     return(frameSource.AllocateCompositeFrame(releaser, depth, points).AsFrame());
                 }
             }
         }
     }
 }
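The _pb used above is presumably a PointCloud block whose Calculate/MapTexture calls produce textured points. A minimal standalone sketch of the same pattern, assuming live depthFrame and colorFrame objects taken from one frameset:

 // Sketch only: pc mirrors the role of _pb above; depthFrame and colorFrame
 // are assumed to be live frames from the same frameset.
 var pc = new PointCloud();
 using (var points = pc.Calculate(depthFrame))
 {
     // Texture the computed points with the chosen video stream.
     pc.MapTexture(colorFrame);
 }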
    internal void ProcessFrames(FrameSet frames, FrameSource src, FramesReleaser releaser, Action<Frame> handleFrame, Action<FrameSet> handleFrameSet)
    {
        var pbs = _processingBlocks.OrderBy(i => i.Order).Where(i => i.Enabled).ToList();

        foreach (var vpb in pbs)
        {
            FrameSet processedFrames = frames;
            switch (vpb.ProcessingType)
            {
            case ProcessingBlockType.Single:
                processedFrames = HandleSingleFrameProcessingBlocks(frames, src, releaser, vpb, handleFrame);
                break;

            case ProcessingBlockType.Multi:
                processedFrames = HandleMultiFramesProcessingBlocks(frames, src, releaser, vpb, handleFrameSet);
                break;
            }
            frames = processedFrames;
        }

        handleFrameSet(frames);
        foreach (var fr in frames)
        {
            using (fr)
                handleFrame(fr);
        }
    }
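A quick sketch of how this dispatcher might be invoked from a processing-block delegate; the two callbacks here are illustrative placeholders, not part of the original sources:

    // Hypothetical invocation: log each processed frame and frameset.
    ProcessFrames(frames, src, releaser,
                  fr => Console.WriteLine("processed frame: " + fr.Profile.Stream),
                  fs => Console.WriteLine("processed frameset with " + fs.Count + " frames"));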
    private Frame ApplyFilter(Frame frame, FrameSource frameSource, FramesReleaser framesReleaser, RsProcessingBlock vpb, Action<Frame> handleFrame)
    {
        if (!vpb.CanProcess(frame))
        {
            return(frame);
        }

        // run the processing block.
        var processedFrame = vpb.Process(frame, frameSource, framesReleaser);

        // In case a fork is requested, notify with the processed frame and keep the original frame for the next frameset.
        if (vpb.Fork())
        {
            handleFrame(processedFrame);
            processedFrame.Dispose();
            return(frame);
        }

        // Avoid disposing the frame in case the filter returns the original frame.
        if (processedFrame == frame)
        {
            return(frame);
        }

        // replace the current frame with the processed one to be used as the input to the next iteration (next filter)
        frame.Dispose();
        return(processedFrame);
    }
        private void SetupProcessingBlock(Pipeline pipeline, Colorizer colorizer, DecimationFilter decimate, SpatialFilter spatial, TemporalFilter temp, HoleFillingFilter holeFill, ThresholdFilter threshold)
        {
            // Setup / start frame processing
            processingBlock = new CustomProcessingBlock((f, src) =>
            {
                // We create a FramesReleaser object that tracks
                // all newly allocated .NET frames and ensures
                // deterministic finalization at the end of the scope.
                using (var releaser = new FramesReleaser())
                {
                    using (var frames = pipeline.WaitForFrames().DisposeWith(releaser))
                    {
                        var processedFrames = frames
                                              .ApplyFilter(decimate).DisposeWith(releaser)
                                              .ApplyFilter(spatial).DisposeWith(releaser)
                                              .ApplyFilter(temp).DisposeWith(releaser)
                                              .ApplyFilter(holeFill).DisposeWith(releaser)
                                              .ApplyFilter(colorizer).DisposeWith(releaser)
                                              .ApplyFilter(threshold).DisposeWith(releaser);

                        // Send it to the next processing stage
                        src.FrameReady(processedFrames);
                    }
                }
            });
        }
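A minimal usage sketch for the block configured above, modeled on the MainWindow example later in this listing; the tokenSource field is an assumption:

        // Hypothetical wiring: register a results callback, then pump framesets
        // into the block from a worker thread (tokenSource is assumed to exist).
        SetupProcessingBlock(pipeline, colorizer, decimate, spatial, temp, holeFill, threshold);
        processingBlock.Start(f =>
        {
            using (var frames = FrameSet.FromFrame(f))
            {
                // Consume the processed frameset here (e.g. hand it to a renderer).
            }
        });

        Task.Factory.StartNew(() =>
        {
            while (!tokenSource.Token.IsCancellationRequested)
            {
                using (var frames = pipeline.WaitForFrames())
                {
                    processingBlock.ProcessFrames(frames);
                }
            }
        }, tokenSource.Token);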
Example #5
 public override FrameSet Process(FrameSet frameset, FramesReleaser releaser)
 {
     lock (_lock)
     {
         using (var depth = frameset.DepthFrame)
             using (var color = frameset.ColorFrame)
                 if (_profilesIds.Count == 0 ||
                     !_profilesIds.ContainsValue(color.Profile.UniqueID) ||
                     !_profilesIds.ContainsValue(depth.Profile.UniqueID))
                 {
                     ResetAligner();
                     _profilesIds[Stream.Depth] = depth.Profile.UniqueID;
                     _profilesIds[Stream.Color] = color.Profile.UniqueID;
                 }
         return(_enabled ? _pb.Process(frameset, releaser) : frameset);
     }
 }
Example #6
        private CustomProcessingBlock SetupProcessingBlock(Pipeline pipeline, Colorizer colorizer, DecimationFilter decimate, SpatialFilter spatial, TemporalFilter temp, HoleFillingFilter holeFill, Align align_to)
        {
            CustomProcessingBlock processingBlock = null;

            if (showType == imgType.color)
            {
                processingBlock = new CustomProcessingBlock((f, src) =>
                {
                    using (var releaser = new FramesReleaser())
                    {
                        using (var frames = pipeline.WaitForFrames().DisposeWith(releaser))
                        {
                            var processedFrames = frames
                                                  .ApplyFilter(align_to).DisposeWith(releaser);
                            // Send it to the next processing stage
                            src.FramesReady(processedFrames);
                        }
                    }
                });
            }
            else if (showType == imgType.mix)
            {
                // Setup / start frame processing
                processingBlock = new CustomProcessingBlock((f, src) =>
                {
                    using (var releaser = new FramesReleaser())
                    {
                        using (var frames = pipeline.WaitForFrames().DisposeWith(releaser))
                        {
                            var processedFrames = frames
                                                  .ApplyFilter(align_to).DisposeWith(releaser)
                                                  .ApplyFilter(decimate).DisposeWith(releaser)
                                                  .ApplyFilter(spatial).DisposeWith(releaser)
                                                  .ApplyFilter(temp).DisposeWith(releaser)
                                                  .ApplyFilter(holeFill).DisposeWith(releaser)
                                                  .ApplyFilter(colorizer).DisposeWith(releaser);

                            // Send it to the next processing stage
                            src.FramesReady(processedFrames);
                        }
                    }
                });
            }

            return(processingBlock);
        }
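Note that the factory returns null when showType matches neither branch, so a call site should guard for it; a hypothetical example:

        // Hypothetical call site for the factory above; guard against the null
        // returned when showType is neither imgType.color nor imgType.mix.
        var block = SetupProcessingBlock(pipeline, colorizer, decimate, spatial, temp, holeFill, align_to);
        if (block == null)
        {
            throw new InvalidOperationException("Unsupported showType: " + showType);
        }
        block.Start(f => { /* consume the processed frames */ });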
Example #7
    private FrameSet HandleMultiFramesProcessingBlocks(FrameSet frameSet, FramesReleaser framesReleaser)
    {
        // multi-frame filters
        var pbs = Instance.m_processingBlocks.OrderBy(i => i.Order).Where(i => i.Enabled).ToList();

        foreach (var vpb in pbs)
        {
            if (!(vpb is MultiFrameVideoProcessingBlock pb))
            {
                continue;
            }
            if (pb.CanProcess(frameSet))
            {
                frameSet = pb.Process(frameSet, framesReleaser);
            }
        }

        return(frameSet);
    }
    public override Frame Process(Frame frame, FrameSource frameSource, FramesReleaser releaser)
    {
        if (!_enabled)
        {
            return(frame);
        }
        var org      = frame as VideoFrame;
        var stride   = org.Width * org.BitsPerPixel / 8;
        var newFrame = frameSource.AllocateVideoFrame(org.Profile, org, org.BitsPerPixel, org.Width, org.Height, stride, Extension.DepthFrame);

        if (_pixels == null || org.Profile.UniqueID != _uniqueID)
        {
            InitPixels(org);
        }
        Marshal.Copy(org.Data, _pixels, 0, _pixels.Length);
        // Quantize the depth values: shifting right then left by _depthResolution
        // zeroes the lowest _depthResolution bits of each pixel (equivalent to
        // masking with ~((1 << _depthResolution) - 1)).
        for (int i = 0; i < _pixels.Length; i++)
        {
            _pixels[i] = (short)(_pixels[i] >> _depthResolution);
            _pixels[i] = (short)(_pixels[i] << _depthResolution);
        }
        Marshal.Copy(_pixels, 0, newFrame.Data, _pixels.Length);
        return(newFrame);
    }
 private FrameSet HandleMultiFramesProcessingBlocks(FrameSet frameSet, FrameSource frameSource, FramesReleaser framesReleaser, RsProcessingBlock videoProcessingBlock, Action<FrameSet> handleFrameSet)
 {
     using (var frame = frameSet.AsFrame())
     {
         if (videoProcessingBlock.CanProcess(frame))
         {
             using (var f = videoProcessingBlock.Process(frame, frameSource, framesReleaser))
             {
                 if (videoProcessingBlock.Fork())
                 {
                     handleFrameSet(FrameSet.FromFrame(f, framesReleaser));
                 }
                 else
                 {
                     return(FrameSet.FromFrame(f, framesReleaser));
                 }
             }
         }
     }
     return(frameSet);
 }
 public override Frame Process(Frame frame, FrameSource frameSource, FramesReleaser releaser)
 {
     return _enabled ? _pb.ApplyFilter(frame as VideoFrame) : frame;
 }
 public abstract FrameSet Process(FrameSet frameset, FramesReleaser releaser);
 public abstract Frame Process(Frame frame, FrameSource frameSource, FramesReleaser releaser);
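These two abstract Process overloads are what the small wrappers above override. A minimal concrete subclass, sketched under the assumption that RsProcessingBlock declares no other abstract members:

 // Illustrative pass-through block; not part of the original sources.
 public class RsPassthroughBlock : RsProcessingBlock
 {
     public override Frame Process(Frame frame, FrameSource frameSource, FramesReleaser releaser)
     {
         // Hand the single frame through unchanged.
         return frame;
     }

     public override FrameSet Process(FrameSet frameset, FramesReleaser releaser)
     {
         // Hand the whole frameset through unchanged.
         return frameset;
     }
 }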
        public MainWindow()
        {
            InitializeComponent();

            try
            {
                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480);
                cfg.EnableStream(Stream.Color, Format.Rgb8);
                var pp = pipeline.Start(cfg);

                // Allocate bitmaps for rendering.
                // Since the sample aligns the depth frames to the color frames, both images will have the color resolution.
                using (var p = pp.GetStream(Stream.Color) as VideoStreamProfile)
                {
                    imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                    imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                }
                var updateColor = UpdateImage(imgColor);
                var updateDepth = UpdateImage(imgDepth);

                // Create custom processing block
                // For demonstration purposes it will:
                // a. Get a frameset
                // b. Run post-processing on the depth frame
                // c. Combine the result back into a frameset
                // Processing blocks are inherently thread-safe and play well with
                // other API primitives such as frame-queues,
                // and can be used to encapsulate advanced operations.
                // All invocations are, however, synchronous, so the high-level threading model
                // is up to the developer.
                block = new CustomProcessingBlock((f, src) =>
                {
                    // We create a FramesReleaser object that tracks
                    // all newly allocated .NET frames and ensures
                    // deterministic finalization at the end of the scope.
                    using (var releaser = new FramesReleaser())
                    {
                        var frames = FrameSet.FromFrame(f).DisposeWith(releaser);

                        var processedFrames = frames.ApplyFilter(decimate).DisposeWith(releaser)
                                              .ApplyFilter(spatial).DisposeWith(releaser)
                                              .ApplyFilter(temp).DisposeWith(releaser)
                                              .ApplyFilter(align).DisposeWith(releaser)
                                              .ApplyFilter(colorizer).DisposeWith(releaser);

                        var colorFrame     = processedFrames.ColorFrame.DisposeWith(releaser);
                        var colorizedDepth = processedFrames[Stream.Depth, Format.Rgb8].DisposeWith(releaser);

                        // Combine the frames into a single result
                        var res = src.AllocateCompositeFrame(colorizedDepth, colorFrame).DisposeWith(releaser);
                        // Send it to the next processing stage
                        src.FramesReady(res);
                    }
                });

                // Register to results of processing via a callback:
                block.Start(f =>
                {
                    using (var frames = FrameSet.FromFrame(f))
                    {
                        var colorFrame     = frames.ColorFrame.DisposeWith(frames);
                        var colorizedDepth = frames[Stream.Depth, Format.Rgb8].DisposeWith(frames);

                        Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                        Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                    }
                });

                var token = tokenSource.Token;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        {
                            // Invoke custom processing block
                            block.ProcessFrames(frames);
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }

        }
Example #14
 public override Frame Process(Frame frame, FrameSource frameSource, FramesReleaser releaser)
 {
     return(_enabled ? _pb.Colorize(frame as VideoFrame) : frame);
 }
        void RunProcess()
        {
            try
            {
                xmlRead();
                Init();

                // Start the worker thread.
                Task.Factory.StartNew(() =>
                {
                    while (!ctokenSource.Token.IsCancellationRequested)
                    {
                        using (var releaser = new FramesReleaser())
                        {
                            using (var frames = pipeline.WaitForFrames().DisposeWith(releaser))
                            {
                                // Apply the post-processing filters to the frameset.

                                var processedFrames = frames
                                                      //.ApplyFilter(align_to).DisposeWith(releaser)
                                                      .ApplyFilter(decimationFilter).DisposeWith(releaser)
                                                      .ApplyFilter(spatialFilter).DisposeWith(releaser)
                                                      .ApplyFilter(temporalFilter).DisposeWith(releaser)
                                                      .ApplyFilter(holeFilter).DisposeWith(releaser)
                                                      //.ApplyFilter(thresholdFilter).DisposeWith(releaser)
                                                      .ApplyFilter(colorizer).DisposeWith(releaser);

                                using (var filteredFrames = FrameSet.FromFrame(processedFrames))
                                {
                                    var colorFrame    = filteredFrames.ColorFrame.DisposeWith(filteredFrames);
                                    var depthFrame    = filteredFrames.DepthFrame.DisposeWith(filteredFrames);
                                    var infraredFrame = filteredFrames.InfraredFrame.DisposeWith(filteredFrames);


                                    calibrationProcess(depthFrame, colorFrame, infraredFrame);

                                    if (isCalibrated)
                                    {
                                        depthFrame     = calcWallSensitive(depthFrame);
                                        objectLocation = findObjectLocation(depthFrame);


                                        if (objectLocation.X != 0 || objectLocation.Y != 0 || objectLocation.Z != 0)
                                        {
                                            Point clickLoc = objectLocationConverter(objectLocation); // map the object location to screen coordinates

                                            if (togIsClickable.IsChecked == true)
                                            {
                                                MouseEvents.MouseLeftClick(clickLoc.X, clickLoc.Y);  // perform a left mouse click
                                            }
                                        }
                                    }

                                    var depthFrameColorized = colorizer.Process<VideoFrame>(depthFrame).DisposeWith(filteredFrames);
                                    Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, depthFrameColorized);
                                    Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                                    Dispatcher.Invoke(DispatcherPriority.Render, updateIR1, infraredFrame);
                                    Dispatcher.Invoke(DispatcherPriority.Render, updateIR2, infraredFrame);

                                    colorFrame.Dispose();
                                    depthFrame.Dispose();
                                    infraredFrame.Dispose();
                                    depthFrameColorized.Dispose();
                                }
                            }
                        }
                    }
                }, ctokenSource.Token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
Example #16
    private FrameSet HandleSingleFrameProcessingBlocks(FrameSet frameSet, FrameSource frameSource, FramesReleaser framesReleaser, RsProcessingBlock videoProcessingBlock, Action<Frame> handleFrame)
    {
        // single-frame filters
        List<Frame> processedFrames = new List<Frame>();

        foreach (var frame in frameSet)
        {
            var currFrame = ApplyFilter(frame, frameSource, framesReleaser, videoProcessingBlock, handleFrame);

            // cache the processed frame
            processedFrames.Add(currFrame);
            if (frame != currFrame)
            {
                frame.Dispose();
            }
        }

        // Combine the frames into a single frameset
        var newFrameSet = frameSource.AllocateCompositeFrame(framesReleaser, processedFrames.ToArray());

        foreach (var f in processedFrames)
        {
            f.Dispose();
        }

        return(newFrameSet);
    }
Example #17
        private void StartCapture(int networkHeight, ComputationMode computationMode)
        {
            try {
                bool bDevicesFound = QueryRealSenseDevices();
                if (bDevicesFound == false)
                {
                    Console.WriteLine("Cannot start acquisition as no RealSense is connected.");
                    toggleStartStop.IsChecked    = false;
                    this.toggleStartStop.Content = "\uF5B0";
                    // Enable all controls
                    this.computationBackend.IsEnabled = true;
                    this.networkSlider.IsEnabled      = true;
                    // Stop demo

                    string acq_msg = "Acquisition Status:\t OFFLINE";
                    acquisition_status.Dispatcher.BeginInvoke((Action) delegate { acquisition_status.Text = acq_msg; });
                    return;
                }

                // get the selected image width and height
                int nImageWidth  = sensorResolutions[resolutionOptionBox.SelectedIndex].Width;
                int nImageHeight = sensorResolutions[resolutionOptionBox.SelectedIndex].Height;

                Console.WriteLine(
                    string.Format("Enabling the {0} S.No: {1}",
                                  availableDevices[camera_source.SelectedIndex].Info[CameraInfo.Name],
                                  availableDevices[camera_source.SelectedIndex].Info[CameraInfo.SerialNumber]));
                Console.WriteLine(
                    string.Format("Selected resolution for the image acquisition is {0}x{1}", nImageWidth, nImageHeight));
                Console.WriteLine(string.Format("Selected network size: {0} along with {1} as the computation device",
                                                networkHeight,
                                                computationMode));
                selectedDeviceSerial = availableDevices[camera_source.SelectedIndex].Info[CameraInfo.SerialNumber];
                // Create and config the pipeline to stream color and depth frames.
                cfg.EnableDevice(availableDevices[camera_source.SelectedIndex].Info[CameraInfo.SerialNumber]);
                cfg.EnableStream(Intel.RealSense.Stream.Color, nImageWidth, nImageHeight, Format.Bgr8, framerate: 30);
                cfg.EnableStream(Intel.RealSense.Stream.Depth, nImageWidth, nImageHeight, framerate: 30);

                Task.Factory.StartNew(() => {
                    try {
                        // Create and config the pipeline to stream color and depth frames.
                        pp = pipeline.Start(cfg);
                        intrinsicsDepthImagerMaster =
                            (pp.GetStream(Intel.RealSense.Stream.Depth).As<VideoStreamProfile>()).GetIntrinsics();

                        // Initialize the Cubemos DNN framework with the required deep learning
                        // model and the target compute device. CPU, GPU and Myriad are supported
                        // target devices. The FP32 model is required for the CPU; the FP16 model
                        // is required for the GPU and the Myriad device.

                        Cubemos.SkeletonTracking.Api skeletontrackingApi;

                        String cubemosModelDir = Common.DefaultModelDir();

                        var computeDevice = Cubemos.TargetComputeDevice.CM_CPU;
                        var modelFile     = cubemosModelDir + "\\fp32\\skeleton-tracking.cubemos";

                        if (computationMode == ComputationMode.GPU)
                        {
                            computeDevice = Cubemos.TargetComputeDevice.CM_GPU;
                            modelFile     = cubemosModelDir + "\\fp16\\skeleton-tracking.cubemos";
                        }
                        else if (computationMode == ComputationMode.MYRIAD)
                        {
                            computeDevice = Cubemos.TargetComputeDevice.CM_MYRIAD;
                            modelFile     = cubemosModelDir + "\\fp16\\skeleton-tracking.cubemos";
                        }

                        var licenseFolder = Common.DefaultLicenseDir();
                        try {
                            skeletontrackingApi = new SkeletonTracking.Api(licenseFolder);
                        }
                        catch (Exception) {
                            throw new Cubemos.Exception(
                                String.Format("Activation key or license key not found in {0}.\n " +
                                              "If you haven't activated the SDK yet, please run post_installation script as described in the Getting Started Guide to activate your license.",
                                              licenseFolder));
                        }

                        try {
                            skeletontrackingApi.LoadModel(computeDevice, modelFile);
                        }
                        catch (Exception ex) {
                            if (File.Exists(modelFile))
                            {
                                throw new Cubemos.Exception(
                                    "Internal error occured during model initialization. Please make sure your compute device satisfies the hardware system requirements.");
                            }
                            else
                            {
                                throw new Cubemos.Exception(
                                    string.Format("Model file \"{0}\" not found. Details: \"{1}\"", modelFile, ex));
                            }
                        }

                        Console.WriteLine("Finished initialization");

                        Stopwatch fpsStopwatch = new Stopwatch();
                        double fps             = 0.0;
                        int nFrameCnt          = 0;

                        bool firstRun = true;

                        Console.WriteLine("Starting image acquisition and skeleton keypoints");
                        while (!tokenSource.Token.IsCancellationRequested)
                        {
                            int pipelineID = 1;
                            if (bEnableTracking == false)
                            {
                                pipelineID = 0;
                            }

                            fpsStopwatch.Restart();

                            // Wait for the next available FrameSet and use a FramesReleaser object
                            // that tracks all newly allocated .NET frames, ensuring deterministic
                            // finalization at the end of the scope.
                            using (var releaser = new FramesReleaser())
                            {
                                using (var frames = pipeline.WaitForFrames())
                                {
                                    if (frames.Count != 2)
                                    {
                                        Console.WriteLine("Not all frames are available...");
                                    }

                                    var f = frames.ApplyFilter(align).DisposeWith(releaser)
                                                  .AsFrameSet().DisposeWith(releaser);

                                    var colorFrame = f.ColorFrame.DisposeWith(releaser);
                                    depthFrame     = f.DepthFrame.DisposeWith(releaser);

                                    var alignedDepthFrame = align.Process<DepthFrame>(depthFrame).DisposeWith(f);

                                    if (temporalFilterEnabled)
                                    {
                                        alignedDepthFrame = temp.Process<DepthFrame>(alignedDepthFrame).DisposeWith(f);
                                    }

                                    // We colorize the depth frame for visualization purposes
                                    var colorizedDepth =
                                        colorizer.Process<VideoFrame>(alignedDepthFrame).DisposeWith(f);

                                    // Preprocess the input image
                                    Bitmap inputImage    = FrameToBitmap(colorFrame);
                                    Bitmap inputDepthMap = FrameToBitmap((VideoFrame)colorizedDepth);

                                    // Run the inference on the preprocessed image
                                    List<SkeletonKeypoints> skeletonKeypoints;
                                    skeletontrackingApi.RunSkeletonTracking(
                                        ref inputImage, networkHeight, out skeletonKeypoints, pipelineID);

                                    if (firstRun)
                                    {
                                        Cnv2.Dispatcher.BeginInvoke((Action) delegate { Panel.SetZIndex(Cnv2, -1); },
                                                                    System.Windows.Threading.DispatcherPriority.Render);

                                        toggleStartStop.Dispatcher.BeginInvoke(
                                            (Action) delegate { toggleStartStop.IsEnabled = true; });

                                        firstRun = false;
                                    }

                                    Bitmap displayImage;
                                    if (bShowOnlySkeletons)
                                    {
                                        displayImage = new Bitmap(inputImage.Width, inputImage.Height);
                                        using (Graphics g = Graphics.FromImage(displayImage))
                                        {
                                            g.Clear(System.Drawing.Color.Black);
                                        }
                                    }
                                    else
                                    {
                                        displayImage = new Bitmap(inputImage);
                                    }

                                    Graphics graphics = Graphics.FromImage(displayImage);

                                    // Render the correct skeletons detected from the inference
                                    if (true == bRenderSkeletons)
                                    {
                                        renderSkeletons(
                                            skeletonKeypoints, nImageWidth, nImageHeight, bEnableTracking, graphics);
                                    }

                                    if (true == bRenderCoordinates)
                                    {
                                        renderCoordinates(skeletonKeypoints, nImageWidth, graphics);
                                    }

                                    if (false == bHideRenderImage)   // Render the final frame onto the display window
                                    {
                                        imgColor.Dispatcher.BeginInvoke(renderDelegate, imgColor, displayImage);
                                    }
                                    if (true == bRenderDepthMap)   // Overlay the depth map onto the display window
                                    {
                                        imgColor.Dispatcher.BeginInvoke(renderDelegate, imgDepth, inputDepthMap);
                                    }

                                    nFrameCnt++;
                                    fps += 1000.0 / fpsStopwatch.ElapsedMilliseconds;

                                    if (nFrameCnt % 25 == 0)
                                    {
                                        string msg = String.Format("FPS:\t\t\t{0:F2}", fps / nFrameCnt);
                                        fps_output.Dispatcher.BeginInvoke((Action) delegate { fps_output.Text = msg; });
                                        fps       = 0;
                                        nFrameCnt = 0;
                                    }

                                    string msg_person_count =
                                        string.Format("Person Count:\t\t{0}", skeletonKeypoints.Count);
                                    person_count.Dispatcher.BeginInvoke(
                                        (Action) delegate { person_count.Text = msg_person_count; });
                                }
                            }
                        }
                    }
                    catch (System.Exception exT) {
                        string errorMsg = string.Format(
                            "Internal Error Occured. Application will now close.\nError Details:\n\n\"{0}\"",
                            exT.Message);
                        Cnv2.Dispatcher.BeginInvoke(
                            new InfoDialog.ShowInfoDialogDelegate(InfoDialog.ShowInfoDialog), "Error", errorMsg);
                    }
                }, tokenSource.Token);
            }
            catch (System.Exception ex) {
                string errorMsg = string.Format(
                    "Internal Error Occured. Application will now close.\nError Details:\n\n\"{0}\"", ex.Message);
                Cnv2.Dispatcher.BeginInvoke(
                    new InfoDialog.ShowInfoDialogDelegate(InfoDialog.ShowInfoDialog), "Error", errorMsg);
            }
        }