/// <summary>
/// Gesture-classification stage of the pipeline: derives the hand gesture
/// from the convexity defects and finger tips produced by the earlier
/// stages, then forwards the frame to the drawing stage.
/// </summary>
/// <param name="p_imgData">The <c>ImageData</c> frame produced by the previous pipeline step.</param>
/// <remarks>
/// <c>async void</c> is imposed by the base-class override; exceptions thrown
/// here are unobservable by callers.
/// </remarks>
protected override async void doWork(Object p_imgData)
{
    // Cast once instead of repeating the cast on every access.
    ImageData imgData = (ImageData)p_imgData;

    if (imgData.ConvexDefects != null)
    {
        List<ConvexDefect> convexDefects = imgData.ConvexDefects;
        List<Point> fingerTips = imgData.FingerTips;
        imgData.Gesture = deriveGesture(ref convexDefects, ref fingerTips);
    }
    else
    {
        // No defect data for this frame; reset the cached thumb position.
        m_thumbPos = 0;
    }

    // Hand the frame to the drawing stage regardless of whether a gesture
    // was derived, so the display keeps updating.
    Processing.getInstance().ToDrawingImage = imgData;
}
/// <summary>
/// Labor-intensive geometry stage of the pipeline. This stage:
///   - crops the wrist from the hand data set,
///   - traces the contour of the hand and wraps a convex hull around it,
///   - extracts convexity defects from the hull,
///   - finds and refines candidate finger tips.
/// </summary>
/// <param name="p_imgData">The <c>ImageData</c> frame produced by the previous pipeline step.</param>
/// <remarks>
/// <c>async void</c> is imposed by the base-class override; exceptions thrown
/// here are unobservable by callers.
/// </remarks>
protected override async void doWork(object p_imgData)
{
    // Cast once instead of repeating the cast on every access.
    ImageData imgData = (ImageData)p_imgData;

    byte[] buffer;
    BitmapData data = BitmapManip.lockBitmap(out buffer, imgData.Image);

    imgData.DataPoints = ImageProcess.getDataPoints(ref data, ref buffer);

    if (imgData.DataPoints.Count > 0)
    {
        m_direction = DIRECTION.INVALID;
        // NOTE(review): cropDataSet appears to update m_direction as a side
        // effect (it is read right after) — confirm against its definition.
        imgData.Filter = cropDataSet(ref data, ref buffer);
        imgData.Contour = ImageProcess.getContour(ref data, ref buffer);
        imgData.ConvexHull = ImageProcess.getConvexHull(imgData.Contour);

        // A defect must span at least this fraction of the hand's extent
        // along the axis perpendicular to the detected arm direction.
        const double DefectThresholdFraction = .15;
        double defectThreshold;
        if (m_direction == DIRECTION.LEFT || m_direction == DIRECTION.RIGHT)
        {
            defectThreshold = imgData.Filter.Width * DefectThresholdFraction;
        }
        else
        {
            defectThreshold = imgData.Filter.Height * DefectThresholdFraction;
        }

        imgData.ConvexDefects = ImageProcess.getConvexDefects(imgData.Contour, imgData.ConvexHull, defectThreshold);
        imgData.ConvexDefects = organizeDefects(imgData.ConvexDefects);

        // Finger tips: raw candidates first, then refined against the defects.
        Dictionary<int, List<Point>> fingerTips = findFingers(ref data, ref buffer);
        imgData.FingerTips = refineFingerTips(ref fingerTips, imgData.ConvexDefects);
    }

    BitmapManip.unlockBitmap(ref buffer, ref data, imgData.Image);
    Processing.getInstance().ToPCAImage = imgData;
}
/// <summary>
/// Filtering stage of the pipeline: transforms the raw frame into a
/// filtered image. The frame is split into four quadrants that are filtered
/// in parallel, the hand is located, Gaussian cancelling is applied, and
/// remaining noise is filtered before the signal is strengthened.
/// UPDATE: this now performs almost instantly instead of the 2 seconds it
/// took before.
/// </summary>
/// <param name="p_imageData">The <c>imageData</c> frame produced by the previous pipeline step.</param>
/// <remarks>
/// <c>async void</c> is imposed by the base-class override; exceptions thrown
/// here are unobservable by callers.
/// </remarks>
protected override async void doWork(Object p_imageData)
{
    // Guard clause: nothing to do until the stage has been initialized.
    if (!m_isInitialized)
    {
        return;
    }

    imageData imgData = (imageData)p_imageData;

    // Buffer shared for concurrent read/write by the four quadrant workers.
    byte[] buffer;
    BitmapData data = BitmapManip.lockBitmap(out buffer, imgData.Image);

    // GetPixelFormatSize returns bits per pixel; we need bytes.
    int depth = Bitmap.GetPixelFormatSize(data.PixelFormat) / 8;

    // Pick the pixel-format-specific worker once (RGB = 3 bytes/px,
    // otherwise ARGB) instead of duplicating the whole Parallel.Invoke
    // block for each format.
    Action<byte[], int, int, int, int, int> filterQuadrant;
    if (depth == 3)
    {
        filterQuadrant = dividedDoWorkRGB;
    }
    else
    {
        filterQuadrant = dividedDoWorkARGB;
    }

    int halfWidth = data.Width / 2;
    int halfHeight = data.Height / 2;
    Parallel.Invoke(
        () => filterQuadrant(buffer, 0, 0, halfWidth, halfHeight, data.Width),                     // upper left
        () => filterQuadrant(buffer, halfWidth, 0, data.Width, halfHeight, data.Width),            // upper right
        () => filterQuadrant(buffer, 0, halfHeight, halfWidth, data.Height, data.Width),           // lower left
        () => filterQuadrant(buffer, halfWidth, halfHeight, data.Width, data.Height, data.Width)); // lower right

    imgData.Datapoints = getDataPoints(ref data, ref buffer);
    findHand(imgData, data, buffer);

    // Provide wide area filtering.
    imgData.Filter = m_filterArea;

    // Gaussian cancelling (pixel-format specific, like the quadrant pass).
    if (depth == 3)
    {
        performCancellingRGB(ref buffer, data);
    }
    else
    {
        performCancellingARGB(ref buffer, data);
    }

    imgData.Datapoints = getDataPoints(ref data, ref buffer);

    // Provide finer area filtering and signal enhancing.
    filterNoise(imgData.Datapoints, ref data, ref buffer);
    strengthenSignal(ref data, ref buffer);
    imgData.Datapoints = getDataPoints(ref data, ref buffer);

    BitmapManip.unlockBitmap(ref buffer, ref data, imgData.Image);
    Processing.getInstance().ToPCAImage = imgData;

    // If someone is listening, raise the event.
    if (ProcessReady != null)
    {
        ProcessReady();
    }
}