        //Called from the reader_DepthFrameArrived event handler only when the depth frame is valid.
        //Updates the depth-frame display and then invokes the GMV calculation function.
        /// <summary>
        /// Converts the raw depth buffer into a Bgr32 visualization shown in
        /// <c>DepthImageModified</c>, draws the quadrant-margin guide lines, and
        /// periodically triggers movement calculation, depth-image saving, and the
        /// deferred video save.
        /// </summary>
        /// <param name="depthFrameData">Pointer to the raw ushort depth buffer (one sample per pixel).</param>
        /// <param name="depthFrameDataSize">Size of the depth buffer in bytes.</param>
        /// <param name="minDepth">Lowest reliable depth value reported by the sensor.</param>
        /// <param name="maxDepth">Highest reliable depth value reported by the sensor.</param>
        private unsafe void ProcessDepthFrameData(IntPtr depthFrameData, uint depthFrameDataSize, ushort minDepth, ushort maxDepth)
        {
            ushort *frameData = (ushort *)depthFrameData;

            int pixelCount = depthFrameDescription.Width * depthFrameDescription.Height;

            // 4 bytes per pixel for the Bgr32 display buffer (j = B, j+1 = G, j+2 = R, j+3 unused).
            byte[] enhPixelData = new byte[pixelCount * 4];
            // FIX: was allocated Width * Height * bytesPerPixel ints (apparent confusion with the
            // byte buffer above) — only one int per pixel is ever written or read in this method.
            int[] depth = new int[pixelCount];

            // convert depth to a visual representation
            for (int i = 0, j = 0; i < (int)(depthFrameDataSize / this.depthFrameDescription.BytesPerPixel); ++i, j += 4)
            {
                // Get the depth for this pixel
                depth[i] = (int)frameData[i];

                // Narrowing to a byte keeps the least-significant bits and discards the
                // most-significant ones, so detail is preserved but the intensity "wraps".
                // Values outside the sensor's reliable range are mapped to 0 (black).
                this.depthPixels[i] = (byte)(depth[i] >= minDepth && depth[i] <= maxDepth ? depth[i] : 0);

                int gray; //grayscale intensity used when !rgbDepthMode

                //if depth is outside the slider-set thresholds => white in grayscale mode and
                //zeroed depth; else => compute the color for the enhPixelData UI display buffer
                if (depth[i] < loDepthThreshold || depth[i] > hiDepthThreshold)
                {
                    gray     = 0xFF;
                    depth[i] = 0;
                }
                else
                {
                    //rgbDepthMode remaps [loDepthThreshold, hiDepthThreshold] onto a
                    //three-band blue -> green -> red ramp
                    if (rgbDepthMode)
                    {
                        int    dist          = hiDepthThreshold - loDepthThreshold;
                        int    adjDepth      = depth[i] - loDepthThreshold;
                        double remappedDepth = adjDepth / (double)dist * (255 * 3);
                        if (remappedDepth > 255 * 2)
                        {
                            // top third of the range: red channel only
                            enhPixelData[j]     = 0;
                            enhPixelData[j + 1] = 0;
                            enhPixelData[j + 2] = (byte)(remappedDepth - (255 * 2));
                        }
                        else if (remappedDepth > 255)
                        {
                            // middle third: green channel only
                            enhPixelData[j]     = 0;
                            enhPixelData[j + 1] = (byte)(remappedDepth - (255));
                            enhPixelData[j + 2] = 0;
                        }
                        else
                        {
                            // bottom third: blue channel only
                            enhPixelData[j]     = (byte)remappedDepth;
                            enhPixelData[j + 1] = 0;
                            enhPixelData[j + 2] = 0;
                        }
                    }
                    // NOTE(review): this exceeds 255 when depth[i] > 0xFFF, and the byte cast
                    // below then wraps — confirm the thresholds keep depth within 12 bits.
                    gray = (255 * depth[i] / 0xFFF);
                }
                if (!rgbDepthMode)
                {
                    enhPixelData[j]     = (byte)gray;
                    enhPixelData[j + 1] = (byte)gray;
                    enhPixelData[j + 2] = (byte)gray;
                }
            }


            // draw margins over the depth frame so that the sliders are easier to use
            for (int iiy = 0; iiy < depthFrameDescription.Height; iiy++)
            {
                for (int iix = 0; iix < depthFrameDescription.Width; iix++)
                {
                    // Bgr32 byte offset of this pixel, computed once instead of per channel.
                    int p = (iix + iiy * depthFrameDescription.Width) * 4;
                    if (iix == quadMarginXR || iiy == quadMarginYB)
                    {
                        // right/bottom margin lines: red
                        enhPixelData[p]     = 0;
                        enhPixelData[p + 1] = 0;
                        enhPixelData[p + 2] = 255;
                    }
                    if (iix == quadMarginXL || iiy == quadMarginYT)
                    {
                        // left/top margin lines: magenta (overwrites red where lines cross)
                        enhPixelData[p]     = 255;
                        enhPixelData[p + 1] = 0;
                        enhPixelData[p + 2] = 255;
                    }
                }
            }
            //create the bitmap and send it to the UI
            DepthImageModified.Source = BitmapSource.Create(depthFrameDescription.Width, depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Bgr32, null, enhPixelData, depthFrameDescription.Width * 4);


            //elapsed wall-clock time since capture started; whole seconds feed the saved-image filename
            TimeSpan elapsed = DateTime.Now.Subtract(timeStart);

            //run the movement calculation every frameAcceptance-th frame
            if (currentFrame % frameAcceptance == 0)
            {
                calculateMovement(depth, depthFrameDescription.Width, depthFrameDescription.Height);
                currentFrame = 0;
            }

            //save the depth image every frameAccDepthSave-th frame, when enabled
            if (currentFrameDepthSave % frameAccDepthSave == 0 && saveDepthPicture)
            {
                // NOTE(review): this second calculateMovement call looks like a copy-paste
                // duplicate — it cannot affect the image saved below (the bitmap was already
                // assigned above) and may double-count movement on frames where both counters
                // trigger. Kept for behavior parity; confirm intent before removing.
                calculateMovement(depth, depthFrameDescription.Width, depthFrameDescription.Height);
                string depthfilename = fileHandler.getDepthImgFolderName() + "depthImg_" + ((int)elapsed.TotalSeconds).ToString() + ".jpg";
                // FIX: FileMode.Create instead of CreateNew — two saves within the same elapsed
                // second produce identical filenames, and CreateNew would throw IOException
                // inside the frame handler. Create overwrites instead.
                using (FileStream savedBitmap = new FileStream(depthfilename, FileMode.Create))
                {
                    BitmapSource      img        = (BitmapSource)(DepthImageModified.Source);
                    JpegBitmapEncoder jpgEncoder = new JpegBitmapEncoder();
                    jpgEncoder.QualityLevel = 70;
                    jpgEncoder.Frames.Add(BitmapFrame.Create(img));
                    jpgEncoder.Save(savedBitmap);
                    // FIX: removed redundant Flush/Close/Dispose — leaving the using block
                    // disposes the stream, which flushes and closes it.
                }
                currentFrameDepthSave = 0;
            }

            //increment counters for next frame
            currentFrameDepthSave++;
            currentFrame++;

            //if saving video was triggered from the kinect we need to check if it has been T/2 since
            //the event. if so then we save the video; if not, we keep counting frames
            if (savingVideo)
            {
                timeOutFrameCount++;
                if (timeOutFrameCount >= videoRecordLength / 2)
                {
                    timeOutFrameCount = 0;
                    savingVideo       = false;
                    SaveVideo(false);
                }
            }
        }