/// <summary>
/// Click handler: generates table-height calibration data from the current depth
/// frame, builds a depth correction map, applies optional border cut-offs from the
/// text boxes, and persists the result via <see cref="SettingsManager"/>.
/// </summary>
private void b_depth_Click(object sender, EventArgs e)
{
    int th, tht;

    // Generate calibration data (table height + tolerance) from the current
    // depth frame and the user-defined calibration points.
    _theightc.GenerateCalibrationData(new DepthImage(_deptharray, _width, _height), _calibrationPoints, out th, out tht);

    DepthMapPreprocessor dmp = new DepthMapPreprocessor();
    DepthCorrectionMap dcm = dmp.CreateDepthCorrectionMap(new DepthImage(_deptharray, _width, _height), th);

    // Override the generated tolerance with 25, as a depth correction map was created.
    tht = 25;

    // If there are border cut-offs, use them.
    try
    {
        if (txt_bottom.Text != "")
            dcm.CutOffBOttom = Convert.ToInt32(txt_bottom.Text);
        if (txt_left.Text != "")
            dcm.CutOffLeft = Convert.ToInt32(txt_left.Text);
        if (txt_r.Text != "")
            dcm.CutOffRight = Convert.ToInt32(txt_r.Text);
        // BUG FIX: CutOffTop was previously read from txt_bottom.Text,
        // silently duplicating the bottom cut-off into the top cut-off.
        if (txt_top.Text != "")
            dcm.CutOffTop = Convert.ToInt32(txt_top.Text);
    }
    catch (Exception)
    {
        // Any non-numeric entry aborts the operation with a user-visible error.
        MessageBox.Show("Ungültige werte beim Border CutOff!", "Fehler");
        return;
    }

    SettingsManager.PreprocessingSet.DefaultCorrectionMap = dcm;
    _theightc.SetCalibrationData(th, tht);

    // Save the correction map; calibration is applied either way.
    if (!SettingsManager.SaveSettings())
    {
        MessageBox.Show("Depth map created and applied, but settings couldn't be saved!", "Warning!");
    }
    else
    {
        MessageBox.Show("DepthMap Data Created and saved. Calibration is now finished.", "Success!");
    }
}
/// <summary>
/// Click handler: runs the table calibration using the captured depth image and
/// the collected calibration points, stores the resulting correction map with the
/// slider-selected border cut-offs, saves the settings, and closes the window.
/// </summary>
private void b_calibrate_Click(object sender, RoutedEventArgs e)
{
    // Border margins taken from the cut-off sliders.
    int top = (int)s_cutoff_top.Value;
    int left = (int)s_cutoff_left.Value;
    int right = (int)s_cutoff_right.Value;
    int bottom = (int)s_cutoff_down.Value;

    // At least one calibration point is required.
    if (points.Count == 0)
    {
        MessageBox.Show("Es muss mindestens ein kalibrationspunkt vorhanden sein!", "Fehler!");
        return;
    }

    int distance;
    int tolerance;
    _theightc.GenerateCalibrationData(_dimage, points, out distance, out tolerance);
    // NOTE: the generated tolerance is deliberately discarded; a fixed tolerance
    // of 25 is used whenever a depth correction map is created (same convention
    // as the depth-map calibration handler).
    _theightc.SetCalibrationData(distance, 25);

    DepthMapPreprocessor dmp = new DepthMapPreprocessor();
    SettingsManager.PreprocessingSet.DefaultCorrectionMap = dmp.CreateDepthCorrectionMap(_dimage, distance);
    SettingsManager.PreprocessingSet.DefaultCorrectionMap.CutOffBOttom = bottom;
    SettingsManager.PreprocessingSet.DefaultCorrectionMap.CutOffLeft = left;
    SettingsManager.PreprocessingSet.DefaultCorrectionMap.CutOffRight = right;
    SettingsManager.PreprocessingSet.DefaultCorrectionMap.CutOffTop = top;

    if (SettingsManager.SaveSettings())
    {
        MessageBox.Show("Kalibration erfolgreich ausgeführt, Einstellungen gespeichert", "Erfolg");
    }
    else
    {
        // FIX: corrected the typo "niht" -> "nicht" in the user-facing message.
        MessageBox.Show("Kalibration ausgeführt, Einstellungen konnten aber nicht gespeichert werden.", "Achtung");
    }

    this.Close();
}
/// <summary>
/// Worker entry point: runs the full recognition pipeline on one frame and raises
/// <c>OnRecognitionFinished</c> with the populated result packet.
/// </summary>
/// <param name="data">
/// An object[] of { PlanarImage depth frame, int[] depth array, Bitmap color frame
/// (may be null) } — matches the shape built by the caller that queues this work.
/// </param>
private void DoRecognitionWork(object data)
{
    object[] dataArray = (object[])data;
    PlanarImage pimg = (PlanarImage)dataArray[0];
    int[] deptharray = (int[])dataArray[1];
    Bitmap colorFrame = (Bitmap)dataArray[2];

    RecognitionDataPacket rpacket = new DataStructures.RecognitionDataPacket();
    DateTime dtBegin = DateTime.Now;

    // Create the depth image and keep an untouched clone for debugging output.
    DepthImage dimg = new DepthImage(deptharray, pimg.Width, pimg.Height);
    rpacket.rawDepthImage = dimg.Clone();

    // Correct and normalize the image.
    DepthMapPreprocessor dmp = new DepthMapPreprocessor();
    dimg = dmp.ApplyDepthCorrection(dimg, SettingsManager.PreprocessingSet.DefaultCorrectionMap);
    dimg = dmp.NormalizeHeights(dimg);

    // Separate objects.
    ObjectSeperator objectSeperator = new ObjectSeperator();
    bool[,] boolmap_object;
    int[,,] neighbourmap;
    List<TableObject> objects = objectSeperator.SeperateObjects(ref dimg, out boolmap_object, out neighbourmap);

    // If supplied, extract the relevant bitmap parts from the color frame.
    if (colorFrame != null)
    {
        ObjectVideoBitmapAssigner ovba = new ObjectVideoBitmapAssigner();
        ovba.AssignVideoBitmap(objects, colorFrame);
    }

    // Extract the hand object from the table objects. Single enumeration via
    // FirstOrDefault replaces the previous Where().Count() + Where().ToArray()[0]
    // double scan; the exact-type predicate is kept so subclasses stay excluded,
    // and HandObj remains null when no hand is present (same as before).
    rpacket.HandObj = (HandObject)objects.FirstOrDefault(o => o.GetType() == typeof(HandObject));

    // Fill the data packet.
    rpacket.correctedDepthImage = dimg;
    rpacket.TableObjects = objects;
    rpacket.objectmap = boolmap_object;
    rpacket.neighbourmap = neighbourmap;
    rpacket.bmpVideoFrame = colorFrame;
    TimeSpan ts = DateTime.Now - dtBegin;
    rpacket.RecognitionDuration = (int)Math.Round(ts.TotalMilliseconds);

    // Optionally dump the raw depth image for debugging.
    if (SettingsManager.RecognitionSet.SaveDebugMaps)
    {
        Bitmap bmp = MapVisualizer.VisualizeDepthImage(rpacket.rawDepthImage);
        bmp.Save("rawDepthImage.bmp");
    }

    // Notify listeners.
    OnRecognitionFinished(rpacket);
}