/// <summary>
/// Computes one histogram per BGR colour channel of <paramref name="img"/> and
/// plots each on its dedicated chart control (redChart / greenChart / blueChart).
/// </summary>
/// <param name="img">Source BGR image; it is not modified.</param>
public void CreateHistogram(Image<Bgr, Byte> img)
{
    // Split() returns deep copies of the B, G and R planes (in that order) that we own.
    Image<Gray, Byte>[] channels = img.Split();
    try
    {
        Image<Gray, Byte> blueChannel = channels[0];
        Image<Gray, Byte> greenChannel = channels[1];
        Image<Gray, Byte> redChannel = channels[2];

        // Red colour channel
        DenseHistogram dhRed = new DenseHistogram(BIN_SIZE, new RangeF(0.0f, BIN_DEPTH));
        dhRed.Calculate<Byte>(new Image<Gray, Byte>[1] { redChannel }, false, null);
        redChart.ClearHistogram();
        redChart.AddHistogram("Red Channel", System.Drawing.Color.Red, dhRed);
        redChart.Refresh();

        // Green colour channel
        DenseHistogram dhGreen = new DenseHistogram(BIN_SIZE, new RangeF(0.0f, BIN_DEPTH));
        dhGreen.Calculate<Byte>(new Image<Gray, Byte>[1] { greenChannel }, false, null);
        greenChart.ClearHistogram();
        greenChart.AddHistogram("Green Channel", System.Drawing.Color.Green, dhGreen);
        greenChart.Refresh();

        // Blue colour channel
        DenseHistogram dhBlue = new DenseHistogram(BIN_SIZE, new RangeF(0.0f, BIN_DEPTH));
        dhBlue.Calculate<Byte>(new Image<Gray, Byte>[1] { blueChannel }, false, null);
        blueChart.ClearHistogram();
        // NOTE(review): only the blue chart calls Show() here, unlike red/green —
        // kept for behavioral compatibility, but confirm it is intentional.
        blueChart.Show();
        blueChart.AddHistogram("Blue Channel", System.Drawing.Color.Blue, dhBlue);
        blueChart.Refresh();
    }
    finally
    {
        // BUG FIX: the split planes were previously leaked (native image buffers).
        foreach (Image<Gray, Byte> channel in channels)
            channel.Dispose();
    }
}
/// <summary>
/// Add a plot of the 1D histogram. You should call the Refresh() function to update the control after all modification is complete.
/// </summary>
/// <param name="name">The name of the histogram</param>
/// <param name="color">The drawing color</param>
/// <param name="histogram">The 1D histogram to be drawn</param>
public void AddHistogram(String name, Color color, DenseHistogram histogram)
{
    Debug.Assert(histogram.Dimension == 1, Properties.StringTable.Only1DHistogramSupported);
    GraphPane pane = new GraphPane();
    // Set the Title
    pane.Title.Text = name;
    pane.XAxis.Title.Text = Properties.StringTable.Value;
    pane.YAxis.Title.Text = Properties.StringTable.Count;

    #region draw the histogram
    RangeF range = histogram.Ranges[0];
    int binSize = histogram.BinDimension[0].Size;
    float step = (range.Max - range.Min) / binSize;
    float start = range.Min;
    // X coordinate for each bin: the lower edge of the bin's value interval.
    double[] bin = new double[binSize];
    for (int binIndex = 0; binIndex < binSize; binIndex++)
    {
        bin[binIndex] = start;
        start += step;
    }
    // Pair each bin position with its count (float counts converted to double for ZedGraph).
    PointPairList pointList = new PointPairList(
        bin,
        Array.ConvertAll<float, double>(
            (float[])histogram.MatND.ManagedArray,
            System.Convert.ToDouble));
    pane.AddCurve(name, pointList, color);
    #endregion

    zedGraphControl1.MasterPane.Add(pane);
}
/// <summary>
/// Builds the 16-bin histogram over [0, 180) used for CamShift tracking, counting
/// only pixels whose values fall inside the configured CamShift mask range, then
/// seeds the track window from the source image's ROI.
/// </summary>
/// <param name="source">Single-channel source image; its ROI defines the initial track window.</param>
public void CalculateHistogram(Image<Gray, byte> source)
{
    histogram = new DenseHistogram(16, new RangeF(0, 180));
    // Mask selects pixels between the configured low/high bounds.
    mask = source.InRange(VideoParameters.Default.CamshiftMaskLow, VideoParameters.Default.CamshiftMaskHigh);
    CvInvoke.cvCalcHist(new[] { source.Ptr }, histogram.Ptr, false, mask.Ptr);
    SetTrackWindow(source.ROI);
}
/// <summary>
/// Computes a 256-bin intensity histogram (range 0–255) of a grayscale image
/// and returns the raw bin counts.
/// </summary>
/// <param name="image">Grayscale source image.</param>
/// <returns>Array of 256 bin counts.</returns>
public static float[] CalculateHistogram(Image<Gray, byte> image)
{
    var bins = new float[256];
    var hist = new DenseHistogram(256, new RangeF(0.0f, 255.0f));
    hist.Calculate(new[] { image }, true, null);
    // Copy the native bin counts into a managed array for the caller.
    hist.MatND.ManagedArray.CopyTo(bins, 0);
    return bins;
}
/// <summary>
/// Display the specified histogram in a new viewer window.
/// </summary>
/// <param name="hist">The 1-dimension histogram to be displayed</param>
/// <param name="title">The name of the histogram</param>
public static void Show(DenseHistogram hist, string title)
{
    var viewer = new HistogramViewer();
    // Only 1-D histograms can be plotted by the control; other dimensions
    // simply open an empty viewer (same as before).
    bool plottable = hist.Dimension == 1;
    if (plottable)
        viewer.HistogramCtrl.AddHistogram(title, Color.Black, hist);
    viewer.HistogramCtrl.Refresh();
    viewer.Show();
}
// Returns one histogram per BGR channel, in blue/green/red order.
// Each histogram has `buckets` bins spanning [0, buckets - 1].
private DenseHistogram[] CreateHistogram(Image<Bgr, Byte> img, int buckets)
{
    var histograms = new DenseHistogram[3];
    for (int channel = 0; channel < 3; channel++)
    {
        // img[channel] extracts the single-channel plane (0 = blue, 1 = green, 2 = red).
        var hist = new DenseHistogram(buckets, new RangeF(0, buckets - 1));
        hist.Calculate(new Image<Gray, Byte>[] { img[channel] }, true, null);
        histograms[channel] = hist;
    }
    return histograms;
}
// Copies the bin counts of three channel histograms (blue, green, red) into
// plain float arrays of `buckets` entries each.
private float[][] ArrayHistogram(DenseHistogram[] histo, int buckets)
{
    var channels = new float[3][];
    for (int i = 0; i < 3; i++)
    {
        channels[i] = new float[buckets];
        histo[i].MatND.ManagedArray.CopyTo(channels[i], 0);
    }
    return channels;
}
/// <summary>
/// Sets up CamShift-style tracking state for the object inside <paramref name="ROI"/>.
/// </summary>
/// <param name="image">First BGR frame; its dimensions size the working buffers before CalObjectHist runs.</param>
/// <param name="ROI">Bounding box of the object to track within <paramref name="image"/>.</param>
public ObjectTracking(Image<Bgr, Byte> image, Rectangle ROI)
{
    // Initialize parameters
    trackbox = new MCvBox2D();
    trackcomp = new MCvConnectedComp();
    hue = new Image<Gray, byte>(image.Width, image.Height);
    // NOTE(review): equalizing a freshly-allocated (all-zero) image is a no-op —
    // presumably intended to run after hue is populated; confirm against CalObjectHist.
    hue._EqualizeHist();
    mask = new Image<Gray, byte>(image.Width, image.Height);
    // 30 hue bins over [0, 180) — OpenCV's 8-bit hue range.
    hist = new DenseHistogram(30, new RangeF(0, 180));
    backproject = new Image<Gray, byte>(image.Width, image.Height);
    // Assign Object's ROI from source image.
    trackingWindow = ROI;
    // Producing Object's hist
    CalObjectHist(image);
}
/// <summary>
/// Renders a per-channel intensity histogram of this picture as a new Picture:
/// three 256-column panels side by side (channels 0..2 of the BGRA image), each
/// column a vertical bar whose height is scaled by the channel's min/max counts.
/// </summary>
public Picture Histogram()
{
    var chans = 3;                       // only the first three channels are plotted (alpha skipped)
    var bins = 256;
    var range = new RangeF(0, 255);
    var hist = new DenseHistogram(bins, range);
    var split = this.bgra.Split();
    // Bar colour per channel, values in BGRA order: blue, green, red bars.
    var colors = new Bgra[] { new Bgra(255, 0, 0, 255), new Bgra(0, 255, 0, 255), new Bgra(0, 0, 255, 255), };
    // NOTE(review): height is bins + 1 — the off-by-one was already questioned
    // by the original author; confirm whether it is intentional.
    var hip = new Picture(bins * chans, bins + 1);
    hip.Bgra.SetValue(Color.Black.ToBgra());
    for (int chan = 0; chan < chans; ++chan)
    {
        // Recompute (accumulate = false) the histogram for this channel, then
        // normalize so bin values fit the plot height.
        hist.Calculate<byte>(new Image<Gray, byte>[] { split[chan] }, false, null);
        hist.Normalize(bins - 1);
        float min, max;
        int[] minLoc, maxLoc;
        hist.MinMax(out min, out max, out minLoc, out maxLoc);
        if (max == min) continue;        // flat histogram: nothing to scale or draw
        var scale = 255.0f / (max - min);
        for (int x = 0; x < bins; ++x)
        {
            // Fill the bar for bin x from the bottom row up to the scaled height.
            var n = hip.Height - (int)(hist[x] * scale);
            for (int y = hip.Height - 1; y > n; --y)
                hip.Bgra[y, x + chan * bins] = colors[chan];
        }
    }
    // Release the per-channel planes created by Split().
    foreach (var c in split)
        c.Dispose();
    return hip;
}
/// <summary>
/// Demo entry point: binarizes a template image and a test image, compares their
/// 256-bin histograms with every supported comparison method, then reports the
/// ratio of black-pixel counts (template / image).
/// </summary>
static void Main(string[] args)
{
    Console.WriteLine("test1");
    imgTemplate = new Image<Gray, byte>("..\\..\\Include\\IMG\\testDetectionBlanc\\1MZoneT2.tif");
    Console.WriteLine("test2");
    img = new Image<Gray, byte>("..\\..\\Include\\IMG\\testDetectionBlanc\\1ZoneT2.tif");
    Console.WriteLine("test3");
    /*HistogramViewer.Show(imgTemplate); HistogramViewer.Show(img);*/
    // Binarize both images before comparing histograms.
    imgTemplate = new ImageModification().convertionBinaire(imgTemplate);
    img = new ImageModification().convertionBinaire(img);
    // Create and initialize histogram
    DenseHistogram hist1 = new DenseHistogram(256, new RangeF(0.0f, 255.0f));
    // Histogram Computing
    hist1.Calculate<Byte>(new Image<Gray, byte>[] { img }, true, null);
    //hist1.
    // Create and initialize histogram
    DenseHistogram hist2 = new DenseHistogram(256, new RangeF(0.0f, 255.0f));
    // Histogram Computing
    hist2.Calculate<Byte>(new Image<Gray, byte>[] { imgTemplate }, true, null);
    // Run each histogram-comparison metric and print its score.
    Double result = Emgu.CV.CvInvoke.CompareHist(hist1, hist2, HistogramCompMethod.Correl);
    Console.WriteLine("Correlation : " + result);
    result = Emgu.CV.CvInvoke.CompareHist(hist1, hist2, HistogramCompMethod.Chisqr);
    Console.WriteLine("Chi-Square : " + result);
    result = Emgu.CV.CvInvoke.CompareHist(hist1, hist2, HistogramCompMethod.Intersect);
    Console.WriteLine("Intersection : " + result);
    result = Emgu.CV.CvInvoke.CompareHist(hist1, hist2, HistogramCompMethod.Bhattacharyya);
    Console.WriteLine("Bhattacharyya distance : " + result);
    result = Emgu.CV.CvInvoke.CompareHist(hist1, hist2, HistogramCompMethod.Hellinger);
    Console.WriteLine("Synonym for Bhattacharyya : " + result);
    result = Emgu.CV.CvInvoke.CompareHist(hist1, hist2, HistogramCompMethod.ChisqrAlt);
    Console.WriteLine("Alternative Chi-Square : " + result);
    Console.WriteLine(img.Rows + " " + img.Cols);
    // Black-pixel counts = total pixels minus non-zero pixels (images are binary).
    double blackPixelImg = (img.Rows * img.Cols) - img.CountNonzero()[0];
    double blackPixelImgTemplate = (imgTemplate.Rows * imgTemplate.Cols) - imgTemplate.CountNonzero()[0];
    double resultat = blackPixelImgTemplate / blackPixelImg;
    Console.WriteLine(resultat);
    Console.ReadKey();
}
/// <summary>
/// Smoke test: fills a 400x400 gray image with uniform random noise and computes
/// its 256-bin histogram over [0, 255].
/// </summary>
public void TestDenseHistogram()
{
    var noise = new Image<Gray, byte>(400, 400);
    noise.SetRandUniform(new MCvScalar(), new MCvScalar(255));

    var hist = new DenseHistogram(256, new RangeF(0.0f, 255.0f));
    hist.Calculate<Byte>(new Image<Gray, byte>[] { noise }, true, null);

    // The bin values are fetched but not asserted; a historical
    // BinaryFormatter serialization round-trip check was left disabled here.
    float[] binValues = hist.GetBinValues();
}
// Compares the histogram over each possible rectangular patch of the specified
// size in the input images and stores the comparison results in the returned
// back-projection map (same depth as Single, one value per patch origin).
public static Image<Gray, Single> BackProjectPatch<TDepth>(Image<Gray, TDepth>[] srcs, Size patchSize, DenseHistogram hist, HISTOGRAM_COMP_METHOD method, double factor) where TDepth : new()
{
    Debug.Assert(srcs.Length == hist.Dimension, "The number of the source image and the dimension of the histogram must be the same.");

    // The legacy C API wants raw IplImage pointers.
    IntPtr[] imgPtrs = Array.ConvertAll<Image<Gray, TDepth>, IntPtr>(srcs, img => img.Ptr);

    // Result map is smaller than the source: one entry per valid patch origin.
    Size size = srcs[0].Size;
    size.Width = size.Width - patchSize.Width + 1;
    size.Height = size.Height - patchSize.Height + 1;

    var res = new Image<Gray, float>(size);
    CvInvoke.cvCalcBackProjectPatch(imgPtrs, res.Ptr, patchSize, hist.Ptr, method, factor);
    return res;
}
/// <summary>
/// Renders a 2D histogram as a grid of filled gray rectangles on <paramref name="dst"/> —
/// one cell per (x-bin, y-bin) pair, brightness proportional to the bin count
/// relative to the largest bin.
/// </summary>
/// <param name="dst">Destination image; cell size is dst size divided by the bin counts.</param>
/// <param name="hist">2-dimensional histogram to visualize.</param>
private void DrawHist2D(Image<Gray, byte> dst, DenseHistogram hist)
{
    int binsX = hist.BinDimension[0].Size;
    int binsY = hist.BinDimension[1].Size;
    int pw = dst.Width / binsX;    // cell width in pixels
    int ph = dst.Height / binsY;   // cell height in pixels

    float min, max;
    int[] tmp1, tmp2;
    hist.MinMax(out min, out max, out tmp1, out tmp2);

    for (int ix = 0; ix < binsX; ix++)
    {
        for (int iy = 0; iy < binsY; iy++)
        {
            // BUG FIX: guard against an all-zero histogram (max == 0 previously
            // divided by zero, producing NaN intensities).
            double intensity = max > 0 ? hist[ix, iy] * 255 / max : 0;
            dst.Draw(new Rectangle(ix * pw, iy * ph, pw, ph), new Gray(intensity), -1);
        }
    }
}
/// <summary>
/// Form constructor: initializes the designer components and the contour buffer.
/// </summary>
public Form1()
{
    InitializeComponent();
    // BUG FIX: a 5-bin DenseHistogram local was previously allocated here but
    // never used (and never disposed); it has been removed.
    ContourPoints = new Point[SEGMENTS];
}
/// <summary>
/// Works out the HSV histogram of an image and returns one 256-bin array per
/// channel: index 0 = hue, 1 = saturation, 2 = value.
/// </summary>
/// <param name="src">
/// Source BGR image we want to get the histogram of; it is not modified.
/// </param>
/// <returns>
/// Jagged array of three 256-element bin-count arrays (hue, sat, val).
/// </returns>
protected float[][] HsvValueFloatArray(Image<Bgr, Byte> src)
{
    float[] HueHist = new float[256];
    float[] SatHist = new float[256];
    float[] ValHist = new float[256];

    // BUG FIX: the converted HSV image and its channel planes were previously
    // leaked; `using` releases their native buffers deterministically.
    using (Image<Hsv, Byte> hsvColor = src.Convert<Hsv, Byte>())
    using (Image<Gray, Byte> hueChannel = hsvColor[0])
    using (Image<Gray, Byte> satChannel = hsvColor[1])
    using (Image<Gray, Byte> valChannel = hsvColor[2])
    {
        DenseHistogram HistoHue = new DenseHistogram(256, new RangeF(0, 256));
        DenseHistogram HistoSat = new DenseHistogram(256, new RangeF(0, 256));
        DenseHistogram HistoVal = new DenseHistogram(256, new RangeF(0, 256));

        HistoHue.Calculate(new Image<Gray, Byte>[] { hueChannel }, true, null);
        HistoSat.Calculate(new Image<Gray, Byte>[] { satChannel }, true, null);
        HistoVal.Calculate(new Image<Gray, Byte>[] { valChannel }, true, null);

        HistoHue.MatND.ManagedArray.CopyTo(HueHist, 0);
        HistoSat.MatND.ManagedArray.CopyTo(SatHist, 0);
        HistoVal.MatND.ManagedArray.CopyTo(ValHist, 0);
    }

    return new float[][] { HueHist, SatHist, ValHist };
}
//histogram match
// NOTE(review): this routine is an unfinished prototype — it builds a histogram
// of the first frame but performs no comparison and always returns true.
// That behavior is preserved; only dead experiments and unused locals were pruned.
static bool HistogramMatch(Mat frame1, Mat frame2)
{
    // Convert both frames to grayscale (despite the *_hist names these hold
    // gray images, not histograms).
    Mat frame1_hist = new Mat();
    Mat frame2_hist = new Mat();
    CvInvoke.CvtColor(frame1, frame1_hist, ColorConversion.Bgr2Gray);
    CvInvoke.CvtColor(frame2, frame2_hist, ColorConversion.Bgr2Gray);

    Image<Gray, float> img_temp = frame1_hist.ToImage<Gray, float>();
    // BUG FIX: Matrix<T>'s constructor takes (rows, cols); the original passed
    // (Width, Height), transposing the buffer and breaking cvCopy for any
    // non-square frame.
    Matrix<float> img = new Matrix<float>(img_temp.Height, img_temp.Width);
    CvInvoke.cvCopy(img_temp, img, new IntPtr());

    // 256-bin histogram of the first frame's gray values.
    DenseHistogram hist = new DenseHistogram(256, new RangeF(0, 256));
    hist.Calculate(new Matrix<float>[] { img }, true, null);

    // TODO: compare `hist` against frame2's histogram (e.g. CvInvoke.CompareHist
    // with a correlation threshold); until then every frame pair "matches".
    return true;
}
/// <summary>
/// Dumps a histogram's raw bin values to a CSV file via Save1DArrayToCsv.
/// </summary>
/// <returns>true on success; false when the save throws (the exception is logged).</returns>
public static bool SaveHistogram(string name, string folderExt, string fileNameExt, DenseHistogram hist, ILogger logger)
{
    try
    {
        float[] binValues = hist.GetBinValues();
        Save1DArrayToCsv<float>(name, folderExt, fileNameExt, ".csv", binValues, logger);
        return true;
    }
    catch (Exception ex)
    {
        logger?.ErrorLog($"Exception occured: {ex}", ClassName);
        return false;
    }
}
/// <summary>
/// Filters <paramref name="images"/> by per-channel histogram correlation against
/// <paramref name="ori_image"/>: any image whose blue, green or red correlation
/// exceeds 55% is resized to 300x300 and added to the result list. The resized
/// base image is always appended last.
/// </summary>
/// <param name="images">Candidate bitmaps to compare.</param>
/// <param name="ori_image">Reference bitmap.</param>
/// <returns>The shared histoImageList, cleared and refilled.</returns>
public List<Bitmap> histogramSearch(List<Bitmap> images, Bitmap ori_image)
{
    // NOTE: the unused base_*/BlueHist float-array locals of the original were removed.
    histoImageList.Clear();
    Image<Bgr, byte> base_img = new Image<Bgr, byte>(ori_image);

    // Reference histograms of the base image, one per BGR channel.
    DenseHistogram blue_Histo = new DenseHistogram(255, new RangeF(0, 255));
    DenseHistogram green_Histo = new DenseHistogram(255, new RangeF(0, 255));
    DenseHistogram red_Histo = new DenseHistogram(255, new RangeF(0, 255));
    blue_Histo.Calculate(new Image<Gray, Byte>[] { base_img[0] }, true, null);
    green_Histo.Calculate(new Image<Gray, Byte>[] { base_img[1] }, true, null);
    red_Histo.Calculate(new Image<Gray, Byte>[] { base_img[2] }, true, null);

    for (int i = 0; i < images.Count; i++)
    {
        Image<Bgr, byte> img = new Image<Bgr, byte>(images[i]);

        DenseHistogram blue_CompareHisto = new DenseHistogram(255, new RangeF(0, 255));
        DenseHistogram green_CompareHisto = new DenseHistogram(255, new RangeF(0, 255));
        DenseHistogram red_CompareHisto = new DenseHistogram(255, new RangeF(0, 255));
        blue_CompareHisto.Calculate(new Image<Gray, Byte>[] { img[0] }, true, null);
        green_CompareHisto.Calculate(new Image<Gray, Byte>[] { img[1] }, true, null);
        red_CompareHisto.Calculate(new Image<Gray, Byte>[] { img[2] }, true, null);

        // Correlation method: 1.0 means identical distributions.
        double cBlue = CvInvoke.cvCompareHist(blue_Histo, blue_CompareHisto, Emgu.CV.CvEnum.HISTOGRAM_COMP_METHOD.CV_COMP_CORREL);
        double cGreen = CvInvoke.cvCompareHist(green_Histo, green_CompareHisto, Emgu.CV.CvEnum.HISTOGRAM_COMP_METHOD.CV_COMP_CORREL);
        double cRed = CvInvoke.cvCompareHist(red_Histo, red_CompareHisto, Emgu.CV.CvEnum.HISTOGRAM_COMP_METHOD.CV_COMP_CORREL);

        // Express correlations as percentages; keep anything over 55% on any channel.
        if (cBlue * 100 > 55 || cRed * 100 > 55 || cGreen * 100 > 55)
        {
            Image<Bgr, byte> resizedImage = img.Resize(300, 300, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
            histoImageList.Add(resizedImage.ToBitmap());
        }
    }

    Image<Bgr, byte> base_resizedImage = base_img.Resize(300, 300, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
    histoImageList.Add(base_resizedImage.ToBitmap());
    return histoImageList;
}
/// <summary>
/// Threshold search over a 256-bin histogram: for every candidate threshold nt it
/// splits the bins into below/above classes and picks the nt minimizing the sum
/// of the two class standard deviations (a within-class-scatter criterion).
/// </summary>
/// <param name="inputHistogram">256-bin intensity histogram to analyze.</param>
/// <param name="minPos">Receives the best threshold position (0 when the search fails).</param>
/// <returns>true when a threshold was computed; false on missing init, null input, or exception.</returns>
public override bool Execute(DenseHistogram inputHistogram, out float minPos)
{
    minPos = 0;
    if (!IsInitialized)
    {
        Logger?.InfoLog("It is not initialized yet.", ClassName);
        return (false);
    }
    if (inputHistogram == null)
    {
        Logger.TraceLog("InputHistogramm is null!", ClassName);
        return (false);
    }
    Hist = inputHistogram.GetBinValues();
    try
    {
        // NOTE(review): only rows [5, 250] are ever used; 4096 rows look
        // oversized — confirm before shrinking to 256.
        float[,] mean = new float[4096, 2];
        float[,] deviation = new float[4096, 2];
        float[,] num = new float[256, 2];
        float min = 100000;
        // Skip the 5 outermost bins at each end when scanning candidate thresholds.
        for (int nt = 5; nt < 256 - 5; nt++)
        {
            // Accumulate count, first moment and second moment for each class
            // (column 0 = bins below nt, column 1 = bins at/above nt).
            for (int n = 0; n < 256; n++)
            {
                if (n < nt)
                {
                    num[nt, 0] += Hist[n];
                    mean[nt, 0] += n * Hist[n];
                    deviation[nt, 0] += n * n * Hist[n];
                }
                else
                {
                    num[nt, 1] += Hist[n];
                    mean[nt, 1] += n * Hist[n];
                    deviation[nt, 1] += n * n * Hist[n];
                }
            }
            // Var = E[x^2] - E[x]^2; deviation becomes each class's std-dev.
            mean[nt, 0] = mean[nt, 0] / num[nt, 0];
            deviation[nt, 0] = (float)Math.Sqrt(deviation[nt, 0] / num[nt, 0] - mean[nt, 0] * mean[nt, 0]);
            mean[nt, 1] = mean[nt, 1] / num[nt, 1];
            deviation[nt, 1] = (float)Math.Sqrt(deviation[nt, 1] / num[nt, 1] - mean[nt, 1] * mean[nt, 1]);
            // Track the threshold with the smallest combined spread.
            if (min > deviation[nt, 0] + deviation[nt, 1])
            {
                min = deviation[nt, 0] + deviation[nt, 1];
                minPos = nt;
            }
        }
        return (true);
    }
    catch (Exception ex)
    {
        Logger?.ErrorLog($"Exception occured: {ex}", ClassName);
        return (false);
    }
}
//Method to recalculate and redisplay the histogram
private void showHistogram()
{
    //create the histogram
    // NOTE(review): this 16-bin histogram field is assigned but never used below —
    // the display is generated directly by histogramBox1; confirm it is still needed.
    histogram = new DenseHistogram(16, new RangeF(0, 255));
    //Clear out an old histogram, if one exists
    //If this is not done, histogram calculated on ROI
    histogramBox1.ClearHistogram();
    histogramBox1.Refresh();
    //Clear the ROI, if one exists
    (imageBox1.Image as Image<Bgr, byte>).ROI = Rectangle.Empty;
    // Recompute and draw a 256-bucket histogram of the full displayed image.
    histogramBox1.GenerateHistograms(imageBox1.Image, 256);
    histogramBox1.Refresh();
}
/// <summary>
/// Gathers per-channel statistics for an RGBA image: a 256-bin histogram, mean
/// and standard deviation per channel (stored as rows of a BasicInfo data table),
/// plus a per-channel balance factor (overall mean / channel mean).
/// </summary>
/// <param name="input">RGBA source image; channel rows 0..3 are named Red/Green/Blue/Alpha.</param>
public BasicInfo(ref Image<Rgba, byte> input)
{
    table = new ImgDB.BasicInfoDataTable();
    int nBins = 256;
    RangeF range1 = new RangeF(0, 255);
    DenseHistogram hist = new DenseHistogram(nBins, range1);
    Image<Gray, byte>[] isg = input.Split();
    histogramsRgba = new List<float[]>();
    averagesRgba = new List<Gray>();
    sdsRgba = new List<MCvScalar>();
    for (short i = 0; i < isg.Count(); i++)
    {
        // accumulate = false: the shared histogram is recomputed per channel.
        hist.Calculate(new Image<Gray, byte>[] { isg[i] }, false, null);
        float[] values = hist.GetBinValues();
        histogramsRgba.Add(values);
        Gray gr = new Gray();
        MCvScalar sd = new MCvScalar();
        isg[i].AvgSdv(out gr, out sd);
        averagesRgba.Add(gr);
        sdsRgba.Add(sd);
        // One table row per channel with its histogram, mean and std-dev.
        ImgDB.BasicInfoRow row = table.NewBasicInfoRow();
        row.Avg = gr.Intensity;
        row.Histogram = values;
        row.SD = sd.V0;
        row.Channel = i;
        if (i == 0) { row.ChannelName = "Red"; }
        else if (i == 1) { row.ChannelName = "Green"; }
        else if (i == 2) { row.ChannelName = "Blue"; }
        else if (i == 3) { row.ChannelName = "Alpha"; }
        table.AddBasicInfoRow(row);
    }
    //factores
    // Balance factors: (average of the four channel means) / channel mean.
    // NOTE(review): an all-zero channel mean (e.g. fully transparent alpha) would
    // yield Infinity here — confirm inputs preclude that.
    float rF = 1;
    float gF = 1;
    float bF = 1;
    float AF = 1;
    ImgDB.BasicInfoRow red = table.FirstOrDefault(o => o.Channel == 0);
    ImgDB.BasicInfoRow green = table.FirstOrDefault(o => o.Channel == 1);
    ImgDB.BasicInfoRow blue = table.FirstOrDefault(o => o.Channel == 2);
    ImgDB.BasicInfoRow alpha = table.FirstOrDefault(o => o.Channel == 3);
    float sum = (float)(red.Avg + green.Avg + blue.Avg + alpha.Avg);
    sum /= 4;
    rF = (float)(sum / red.Avg);
    gF = (float)(sum / green.Avg);
    bF = (float)(sum / blue.Avg);
    AF = (float)(sum / alpha.Avg);
    red.Factor = rF;
    green.Factor = gF;
    blue.Factor = bF;
    alpha.Factor = AF;
}
/// <summary>
/// Computes the mean gray intensity of <paramref name="img"/> and the mean
/// B/G/R intensities of <paramref name="img2"/> from their 256-bin histograms,
/// then reports the results in richTextBox3.
/// </summary>
void histoG(Image<Gray, byte> img, Image<Bgr, byte> img2, ref float meangray, ref float meanR, ref float meanG, ref float meanB)
{
    DenseHistogram Histo = new DenseHistogram(256, new RangeF(0, 255));
    DenseHistogram Histo_temp = new DenseHistogram(256, new RangeF(0, 255));
    float[,] colorHist = new float[3, 256];
    float[] tempHist = new float[256];
    float[] grayHist = new float[256];

    Histo.Calculate(new Image<Gray, byte>[] { img }, true, null);

    Image<Gray, Byte>[] images = img2.Split();   // B, G, R planes
    try
    {
        for (int c = 0; c < 3; c++)
        {
            // BUG FIX: the original passed accumulate = true while reusing
            // Histo_temp, so each channel's counts were added on top of the
            // previous channel's. accumulate = false recomputes from scratch.
            Histo_temp.Calculate(new Image<Gray, byte>[] { images[c] }, false, null);
            Histo_temp.MatND.ManagedArray.CopyTo(tempHist, 0);
            for (int m = 0; m < 256; m++) colorHist[c, m] = tempHist[m];
        }
    }
    finally
    {
        // Release the split planes (previously leaked).
        foreach (var plane in images) plane.Dispose();
    }
    Histo.MatND.ManagedArray.CopyTo(grayHist, 0);

    meangray = 0; meanR = 0; meanG = 0; meanB = 0;
    int totalgray = 0, totalR = 0, totalG = 0, totalB = 0;
    // BUG FIX: the original looped m < 255, silently dropping bin 255 (pure white).
    for (int m = 0; m < 256; m++)
    {
        meangray = meangray + grayHist[m] * m;
        totalgray = totalgray + (int)grayHist[m];
        meanB = meanB + colorHist[0, m] * m;
        totalB = totalB + (int)colorHist[0, m];
        meanG = meanG + colorHist[1, m] * m;
        totalG = totalG + (int)colorHist[1, m];
        meanR = meanR + colorHist[2, m] * m;
        totalR = totalR + (int)colorHist[2, m];
    }
    meangray = meangray / totalgray;
    meanB = meanB / totalB;
    meanG = meanG / totalG;
    meanR = meanR / totalR;
    richTextBox3.Text = "MGray:" + meangray + "\nMG:" + meanG + "\nMB:" + meanB + "\nMR:" + meanR;
}
/// <summary>
/// Form constructor: wires up the game timers, initializes the frame processor,
/// and loads the pre-computed histogram from hist.dat.
/// </summary>
public Form1()
{
    InitializeComponent();
    SetAnimation();
    //ConnectToServer();
    //gamestate = GameState.GAME;

    myTimer.Tick += new EventHandler(GameDraw);
    myTimer.Interval = 25;
    myTimer.Start();

    controlTimer.Tick += new EventHandler(GameControlReset);
    controlTimer.Interval = 500;

    myplayerTimer.Tick += new EventHandler(PlayerStateReset);
    myplayerTimer.Interval = 5000;

    FPU = new FrameProcessor();
    FPU.Reset();

    // SECURITY NOTE: BinaryFormatter is unsafe on untrusted data (and removed in
    // .NET 9); acceptable here only because hist.dat ships with the application.
    IFormatter formatter = new BinaryFormatter();
    // BUG FIX: the file stream was previously never closed/disposed.
    using (FileStream fs = new FileStream("../../hist.dat", FileMode.Open))
    {
        HistSerial hs = (HistSerial)formatter.Deserialize(fs);
        hist = hs.hist;
    }
    FPU.SetHist(hist);
}
/// <summary>
/// Per-frame pipeline: detect a face, (re)initialize a hue histogram from a small
/// face patch on first lock, back-project the histogram over the frame, then track
/// two hand candidates with k-means and compute their image moments. Per-stage
/// timings are captured in the t_* fields.
/// </summary>
public void ProcessFrame(Image<Bgr, Byte> frame)
{
    // --- Stage 1: face detection (timed) ---
    sw.Reset();
    sw.Start();
    MCvAvgComp[] faces = FaceDetect(frame);
    sw.Stop();
    t_facedetect = sw.ElapsedMilliseconds;

    // --- Stage 2: hue extraction + validity mask (timed) ---
    sw.Reset();
    sw.Start();
    Image<Hsv, Byte> hsv = frame.Convert<Hsv, Byte>();
    Image<Gray, Byte> hue = new Image<Gray, byte>(frame.Width, frame.Height);
    Image<Gray, Byte> mask = new Image<Gray, byte>(frame.Width, frame.Height);
    // Mask keeps pixels with enough saturation/value for the hue to be meaningful.
    Emgu.CV.CvInvoke.cvInRangeS(hsv, new MCvScalar(0, 30, 30, 0), new MCvScalar(180, 256, 256, 0), mask);
    Emgu.CV.CvInvoke.cvSplit(hsv, hue, IntPtr.Zero, IntPtr.Zero, IntPtr.Zero);
    if (isTracked == false)
    {
        if (faces.Length != 0)
        {
            // First lock: seed the skin-colour histogram from a small central face patch.
            var ff = faces[0];
            Rectangle smallFaceROI = new Rectangle(ff.rect.X + ff.rect.Width / 8, ff.rect.Y + ff.rect.Height / 8, ff.rect.Width / 4, ff.rect.Height / 4);
            _hist = GetHist(hue, smallFaceROI, mask);
            isTracked = true;
            th_check = true;
            center = new Point[] { new Point(0, 0), new Point(0, 0) };
        }
        else
        {
            // No face and no prior lock: nothing to track this frame.
            have_face = false;
            have_left = false;
            have_right = false;
            return;
        }
    }
    sw.Stop();
    t_hue = sw.ElapsedMilliseconds;

    // Remember the last seen face rectangle even when detection misses this frame.
    if (faces.Length != 0)
    {
        face_rect = faces[0].rect;
        face = face_rect;
        have_face = true;
    }
    else
    {
        face = face_rect;
        have_face = false;
    }

    // --- Stage 3: histogram back-projection, thresholded to suppress noise (timed) ---
    sw.Reset();
    sw.Start();
    backproject = GetBackproject(hue, _hist, mask, face_rect).ThresholdToZero(new Gray(backproj_threshold));
    sw.Stop();
    t_backproject = sw.ElapsedMilliseconds;

    // --- Stage 4: hand-candidate localization via k-means + refinement (timed) ---
    sw.Reset();
    sw.Start();
    if (isTracked)
    {
        center = kmeans(center, backproject, face_rect, kmeans_scale);
        center = refine_center(center, backproject);
    }
    sw.Stop();
    t_kmeans = sw.ElapsedMilliseconds;

    // --- Stage 5: moments of each hand window (timed) ---
    sw.Reset();
    sw.Start();
    right = new Rectangle(center[0].X - hand_size / 2, center[0].Y - hand_size / 2, hand_size, hand_size);
    left = new Rectangle(center[1].X - hand_size / 2, center[1].Y - hand_size / 2, hand_size, hand_size);
    backproject.ROI = left;
    left_mom = backproject.GetMoments(false);
    backproject.ROI = right;
    right_mom = backproject.GetMoments(false);
    Emgu.CV.CvInvoke.cvResetImageROI(backproject);
    sw.Stop();
    t_hand = sw.ElapsedMilliseconds;
    ProcessInput();
}
/// <summary>
/// Computes a 16-bin hue histogram over [0, 180) restricted to <paramref name="ROI"/>,
/// counting only pixels enabled by <paramref name="mask"/>. Both images' ROIs are
/// set for the calculation and reset before returning.
/// </summary>
private DenseHistogram GetHist(Image<Gray, Byte> hue, Rectangle ROI, Image<Gray, Byte> mask)
{
    DenseHistogram hist = new DenseHistogram(16, new RangeF(0, 180));
    Emgu.CV.CvInvoke.cvSetImageROI(hue, ROI);
    Emgu.CV.CvInvoke.cvSetImageROI(mask, ROI);
    // The implicit Image -> IntPtr conversion passes the underlying IplImage pointer.
    IntPtr[] imgs = new IntPtr[1] { hue };
    Emgu.CV.CvInvoke.cvCalcHist(imgs, hist, false, mask);
    Emgu.CV.CvInvoke.cvResetImageROI(hue);
    Emgu.CV.CvInvoke.cvResetImageROI(mask);
    return hist;
}
/// <summary>
/// Filters <paramref name="images"/> by per-channel histogram correlation against
/// <paramref name="ori_image"/>: any image whose blue, green or red correlation
/// exceeds 55% is resized to 300x300 and added to the result list. The resized
/// base image is always appended last.
/// </summary>
/// <param name="images">Candidate bitmaps to compare.</param>
/// <param name="ori_image">Reference bitmap.</param>
/// <returns>The shared histoImageList, cleared and refilled.</returns>
public List<Bitmap> histogramSearch(List<Bitmap> images, Bitmap ori_image)
{
    // NOTE: the unused base_*/BlueHist float-array locals of the original were removed.
    histoImageList.Clear();
    Image<Bgr, byte> base_img = new Image<Bgr, byte>(ori_image);

    // Reference histograms of the base image, one per BGR channel.
    DenseHistogram blue_Histo = new DenseHistogram(255, new RangeF(0, 255));
    DenseHistogram green_Histo = new DenseHistogram(255, new RangeF(0, 255));
    DenseHistogram red_Histo = new DenseHistogram(255, new RangeF(0, 255));
    blue_Histo.Calculate(new Image<Gray, Byte>[] { base_img[0] }, true, null);
    green_Histo.Calculate(new Image<Gray, Byte>[] { base_img[1] }, true, null);
    red_Histo.Calculate(new Image<Gray, Byte>[] { base_img[2] }, true, null);

    for (int i = 0; i < images.Count; i++)
    {
        Image<Bgr, byte> img = new Image<Bgr, byte>(images[i]);

        DenseHistogram blue_CompareHisto = new DenseHistogram(255, new RangeF(0, 255));
        DenseHistogram green_CompareHisto = new DenseHistogram(255, new RangeF(0, 255));
        DenseHistogram red_CompareHisto = new DenseHistogram(255, new RangeF(0, 255));
        blue_CompareHisto.Calculate(new Image<Gray, Byte>[] { img[0] }, true, null);
        green_CompareHisto.Calculate(new Image<Gray, Byte>[] { img[1] }, true, null);
        red_CompareHisto.Calculate(new Image<Gray, Byte>[] { img[2] }, true, null);

        // Correlation method: 1.0 means identical distributions.
        double cBlue = CvInvoke.cvCompareHist(blue_Histo, blue_CompareHisto, Emgu.CV.CvEnum.HISTOGRAM_COMP_METHOD.CV_COMP_CORREL);
        double cGreen = CvInvoke.cvCompareHist(green_Histo, green_CompareHisto, Emgu.CV.CvEnum.HISTOGRAM_COMP_METHOD.CV_COMP_CORREL);
        double cRed = CvInvoke.cvCompareHist(red_Histo, red_CompareHisto, Emgu.CV.CvEnum.HISTOGRAM_COMP_METHOD.CV_COMP_CORREL);

        // Express correlations as percentages; keep anything over 55% on any channel.
        if (cBlue * 100 > 55 || cRed * 100 > 55 || cGreen * 100 > 55)
        {
            Image<Bgr, byte> resizedImage = img.Resize(300, 300, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
            histoImageList.Add(resizedImage.ToBitmap());
        }
    }

    Image<Bgr, byte> base_resizedImage = base_img.Resize(300, 300, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
    histoImageList.Add(base_resizedImage.ToBitmap());
    return (histoImageList);
}
/// <summary>
/// Captures the tracking model from the current frame: runs background
/// subtraction, builds an HSV validity mask, computes the hue histogram of the
/// selected region (restricted to the foreground blob), and initializes the
/// mean-shift track window.
/// </summary>
private void Model_Click(object sender, EventArgs e)
{
    //### set currImage which has been used for taking model as modelImage
    modelImg = currImage[0];
    //### Background Subtraction ~ currImage is subtracted with the bgImage and the result is stored in finalBlobImg.
    bgSubtraction(currImage[0], bgImage[0], ref finalBlobImg1, ref blobRect1);
    /* Allocate buffers */
    hsv = new Image<Hsv, Byte>(w, h);
    hue = new Image<Bgr, Byte>(w, h);
    mask = new Image<Gray, Byte>(w, h);
    backproject = new Image<Bgr, Byte>(w, h);
    hist = new DenseHistogram(hdims, hranges); //cvCreateHist(1, &hdims, CV_HIST_ARRAY, &hranges, 1);
    hsv = modelImg.Convert<Hsv, Byte>();
    //extract the hue and value channels
    Image<Gray, Byte>[] channels = hsv.Split(); //split into components
    Image<Gray, Byte>[] imghue = new Image<Gray, byte>[1];
    imghue[0] = channels[0]; //hsv, so channels[0] is hue.
    Image<Gray, Byte> imgval = channels[2]; //hsv, so channels[2] is value.
    Image<Gray, Byte> imgsat = channels[1]; //hsv, so channels[1] is saturation.
    /*
     * Check if the pixels in hsv fall within a particular range.
     * H: 0 to 180
     * S: smin to 256
     * V: vmin to vmax
     * Store the result in variable: mask
     */
    Hsv hsv_lower = new Hsv(0, smin, Math.Min(vmin, vmax));
    Hsv hsv_upper = new Hsv(180, 256, Math.Max(vmin, vmax));
    mask = hsv.InRange(hsv_lower, hsv_upper);
    //setting ROI to images
    mask.ROI = selection;
    // Restrict the hue plane to the (eroded) foreground blob before the ROI crop.
    imghue[0] = imghue[0].And(finalBlobImg1.Erode(3));
    imghue[0].ROI = selection;
    /*
     * Calculate the histogram of selected region
     * Store the result in variable: hist
     */
    hist.Calculate(imghue, false, mask);
    /* Scale the histogram */
    float hMin, hMax;
    int[] minLoc;
    int[] maxLoc;
    hist.MinMax(out hMin, out hMax, out minLoc, out maxLoc);
    /* Reset ROI for hue and mask */
    CvInvoke.cvResetImageROI(imghue[0]);
    CvInvoke.cvResetImageROI(mask);
    /* Set tracking windows */
    track_window_mean1 = selection;
    //track_window_mean2 = selection;
    //### Update the pictureBoxes with mask and hsv images
    //pictureBox2.Image = mask.Bitmap;
    //pictureBox2.Image = finalBlobImg1.Bitmap;
    //pictureBox4.Image = finalBlobImg1.Bitmap;
    //pictureBox3.Image = imghue[0].Or(finalBlobImg1).Bitmap;
    Console.WriteLine("###Model Captured....");
    modelFlag = true;
}
/// <summary>
/// Loads the image selected in the list, displays it, computes its B/G/R
/// histograms, draws them on the main ZedGraph pane, then draws a coarse
/// ("quantized") version on the second pane.
/// </summary>
private void pictureList_SelectedIndexChanged(object sender, EventArgs e)
{
    string zdjecie = pictureList.SelectedItem.ToString();
    string pelnaSciezka = SciezkaFolderZeZdjeciami + zdjecie;
    Image image = Image.FromFile(pelnaSciezka);
    // NOTE: the unused histogramImage local (referenced only by commented-out
    // code) was removed.
    pictureBox.Image = image;

    #region oblicz wartosci histogramu
    float[] BlueHist = new float[256];
    float[] GreenHist = new float[256];
    float[] RedHist = new float[256];
    var img = new Image<Bgr, byte>(pelnaSciezka);
    var Histo = new DenseHistogram(255, new RangeF(0, 255));
    Image<Gray, Byte> img2Blue = img[0];
    Image<Gray, Byte> img2Green = img[1];
    Image<Gray, Byte> img2Red = img[2];
    // One histogram object is reused; Clear() between channels because the
    // accumulate flag is true.
    Histo.Calculate(new[] { img2Blue }, true, null);
    Histo.MatND.ManagedArray.CopyTo(BlueHist, 0);
    Histo.Clear();
    Histo.Calculate(new[] { img2Green }, true, null);
    Histo.MatND.ManagedArray.CopyTo(GreenHist, 0);
    Histo.Clear();
    Histo.Calculate(new[] { img2Red }, true, null);
    Histo.MatND.ManagedArray.CopyTo(RedHist, 0);
    #endregion

    #region narysuj histogram
    GraphPane mPane = zedGraph.GraphPane;
    mPane.Title.Text = "RGB";
    var czerwony = new PointPairList();
    var zielony = new PointPairList();
    var niebieski = new PointPairList();
    for (int i = 0; i < 255; i++)
    {
        czerwony.Add(i, RedHist[i]);
        zielony.Add(i, GreenHist[i]);
        niebieski.Add(i, BlueHist[i]);
    }
    mPane.CurveList.Clear();
    // BUG FIX: the green and blue curves were both labeled "R" in the legend.
    LineItem wykresR = mPane.AddCurve("R", czerwony, Color.Red, SymbolType.Default);
    LineItem wykresG = mPane.AddCurve("G", zielony, Color.Green, SymbolType.Default);
    LineItem wykresB = mPane.AddCurve("B", niebieski, Color.Blue, SymbolType.Default);
    zedGraph.AxisChange();
    zedGraph.Refresh();
    #endregion

    #region (uboga) kwantyzacja
    // NOTE(review): the original windows advance by 1 bin while summing 4 bins,
    // so consecutive "quantized" values overlap; behavior preserved pending
    // confirmation that a stride of wielkoscPrzedialu was not intended.
    int wielkoscPrzedialu = 4;
    var RedHistQ = SumOverlappingWindows(RedHist, wielkoscPrzedialu);
    var GreenHistQ = SumOverlappingWindows(GreenHist, wielkoscPrzedialu);
    var BlueHistQ = SumOverlappingWindows(BlueHist, wielkoscPrzedialu);

    GraphPane mPane2 = zedGraphQ.GraphPane;
    mPane2.Title.Text = "RGB kwant.";
    var czerwonyQ = new PointPairList();
    var zielonyQ = new PointPairList();
    var niebieskiQ = new PointPairList();
    for (int i = 0; i < RedHistQ.Length; i++)
    {
        czerwonyQ.Add(i, RedHistQ[i]);
        zielonyQ.Add(i, GreenHistQ[i]);
        niebieskiQ.Add(i, BlueHistQ[i]);
    }
    mPane2.CurveList.Clear();
    LineItem wykresRQ = mPane2.AddCurve("R", czerwonyQ, Color.Red, SymbolType.Default);
    LineItem wykresGQ = mPane2.AddCurve("G", zielonyQ, Color.Green, SymbolType.Default);
    LineItem wykresBQ = mPane2.AddCurve("B", niebieskiQ, Color.Blue, SymbolType.Default);
    zedGraphQ.AxisChange();
    zedGraphQ.Refresh();
    #endregion
}

/// <summary>
/// Sums windows of <paramref name="windowSize"/> bins that advance by 1 bin
/// per output value (length/windowSize values) — mirrors the original in-line loops.
/// </summary>
private static float[] SumOverlappingWindows(float[] hist, int windowSize)
{
    var result = new float[hist.Length / windowSize];
    for (int i = 0; i < result.Length; i++)
    {
        float sum = 0;
        for (int j = 0; j < windowSize; j++)
        {
            sum += hist[i + j];
        }
        result[i] = sum;
    }
    return result;
}
/// <summary> /// 2D計算值方圖(色調與飽和度) ,使用emgucv提供的cvInvoke去調用opencv的函式 /// </summary> private static DenseHistogram Cal2DHsvHist(IntPtr srcImage, int h_bins, int s_bins) { try { DenseHistogram histDense; int[] hist_size = new int[2] { h_bins, s_bins }; IntPtr hsv = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(srcImage), Emgu.CV.CvEnum.IPL_DEPTH.IPL_DEPTH_8U, 3); IntPtr h_plane = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(srcImage), Emgu.CV.CvEnum.IPL_DEPTH.IPL_DEPTH_8U, 1); IntPtr s_plane = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(srcImage), Emgu.CV.CvEnum.IPL_DEPTH.IPL_DEPTH_8U, 1); IntPtr v_plane = CvInvoke.cvCreateImage(CvInvoke.cvGetSize(srcImage), Emgu.CV.CvEnum.IPL_DEPTH.IPL_DEPTH_8U, 1); IntPtr[] planes = new IntPtr[2] { h_plane, s_plane }; /* H 分量的变化范围 */ float[] h_ranges = new float[2] { 0, h_max_range }; /* S 分量的变化范围*/ float[] s_ranges = new float[2] { 0, s_max_range }; IntPtr inPtr1 = new IntPtr(0); IntPtr inPtr2 = new IntPtr(0); //GCHandle:提供從Unmanaged 記憶體存取Managed 物件的方法。 //配置指定型別的數值記憶體 GCHandle gch1 = GCHandle.Alloc(h_ranges, GCHandleType.Pinned); GCHandle gch2 = GCHandle.Alloc(s_ranges, GCHandleType.Pinned); try { inPtr1 = gch1.AddrOfPinnedObject(); inPtr2 = gch2.AddrOfPinnedObject(); } finally { gch1.Free(); gch2.Free(); } //有上述的GCHandle,此行才有作用 IntPtr[] ranges = new IntPtr[2] { inPtr1, inPtr2 }; /* 输入图像转换到HSV颜色空间 */ CvInvoke.cvCvtColor(srcImage, hsv, Emgu.CV.CvEnum.COLOR_CONVERSION.CV_BGR2HSV); CvInvoke.cvSplit(hsv, h_plane, s_plane, v_plane, System.IntPtr.Zero); // 分离的单通道数组d /* 创建直方图,二维, 每个维度上均分 */ //emgucv的DenseHistogram資料格式也可使用cvInvoke的openCV函式 RangeF hRange = new RangeF(0f, h_max_range); //H色調分量的變化範圍 RangeF sRange = new RangeF(0f, s_max_range); //S飽和度分量的變化範圍 histDense = new DenseHistogram(hist_size, new RangeF[] { hRange, sRange }); CvInvoke.cvCalcHist(planes, histDense, false, System.IntPtr.Zero); return(histDense); } catch (Exception ex) { throw new InvalidOperationException(ex.Message); } }
/// <summary> /// Generate histograms for the image. One histogram is generated for each color channel. /// You will need to call the Refresh function to do the painting afterward. /// </summary> /// <param name="image">The image to generate histogram from</param> /// <param name="numberOfBins">The number of bins for each histogram</param> public void GenerateHistograms(IImage image, int numberOfBins) { IImage[] channels = image.Split(); Type imageType = Toolbox.GetBaseType(image.GetType(), "Image`2"); IColor typeOfColor = Activator.CreateInstance(imageType.GetGenericArguments()[0]) as IColor; String[] channelNames = Reflection.ReflectColorType.GetNamesOfChannels(typeOfColor); Color[] colors = Reflection.ReflectColorType.GetDisplayColorOfChannels(typeOfColor); float minVal, maxVal; #region Get the maximum and minimum color intensity values Type typeOfDepth = imageType.GetGenericArguments()[1]; if (typeOfDepth == typeof(Byte)) { minVal = 0.0f; maxVal = 256.0f; } else { #region obtain the maximum and minimum color value double[] minValues, maxValues; Point[] minLocations, maxLocations; image.MinMax(out minValues, out maxValues, out minLocations, out maxLocations); double min = minValues[0], max = maxValues[0]; for (int i = 1; i < minValues.Length; i++) { if (minValues[i] < min) min = minValues[i]; if (maxValues[i] > max) max = maxValues[i]; } #endregion minVal = (float)min; maxVal = (float)max; } #endregion for (int i = 0; i < channels.Length; i++) using (DenseHistogram hist = new DenseHistogram(numberOfBins, new RangeF(minVal, maxVal))) { hist.Calculate(new IImage[1] { channels[i] }, true, null); AddHistogram(channelNames[i], colors[i], hist); } }
//////////////////////////////////////////////////////////////////////////////////////////////
/// <summary>
/// Renders a 1D hue histogram into a BGR image using the OpenCV C API through
/// Emgu's CvInvoke wrappers. Each bin is drawn as a filled bar colored with the
/// hue it represents. NOTE: the original author flagged that the hue color
/// drawing "still have problem".
/// </summary>
/// <param name="histDense">The 1D hue histogram to draw.</param>
/// <returns>The rendered histogram image, ready for display.</returns>
/// <exception cref="InvalidOperationException">Wraps any failure; see InnerException.</exception>
public static Image <Bgr, Byte> Generate1DHistogramImgForDraw(DenseHistogram histDense)
{
    try
    {
        float max_value = 0.0f;
        int[] a1 = new int[100];
        int[] b1 = new int[100];
        float ax = 0;
        int h_bins = histDense.BinDimension[0].Size;

        // The DenseHistogram can be passed straight to the cvInvoke OpenCV call.
        CvInvoke.cvGetMinMaxHistValue(histDense, ref ax, ref max_value, a1, b1);

        // Output image for the plot.
        int height = 240;
        int width = 800;
        IntPtr hist_img = CvInvoke.cvCreateImage(new System.Drawing.Size(width, height), Emgu.CV.CvEnum.IPL_DEPTH.IPL_DEPTH_8U, 3);
        CvInvoke.cvZero(hist_img);
        // NOTE(review): hist_img is wrapped and returned below but never
        // explicitly released — confirm IplImagePointerToEmgucvImage takes
        // ownership of the unmanaged buffer.

        int bin_w = width / (h_bins);
        for (int h = 0; h < h_bins; h++)
        {
            // Bin count scaled to the image height.
            double bin_val = CvInvoke.cvQueryHistValue_1D(histDense, h);
            int intensity = (int)System.Math.Round(bin_val * height / max_value);

            // Draw the bar in the BGR equivalent of the hue this bin represents.
            CvInvoke.cvRectangle(hist_img,
                new System.Drawing.Point(h * bin_w, height),
                new System.Drawing.Point((h + 1) * bin_w, height - intensity),
                HueToBgr(h * 180.0d / h_bins), -1,
                Emgu.CV.CvEnum.LINE_TYPE.EIGHT_CONNECTED, 0);
        }

        return (EmguFormatConvetor.IplImagePointerToEmgucvImage <Bgr, Byte>(hist_img));
    }
    catch (Exception ex)
    {
        // BUGFIX: preserve the original exception as InnerException instead of
        // discarding its stack trace. Also removed two temporary 1x1 IplImages
        // (hsv_color / rgb_color) that were allocated but never used or
        // released — a pure unmanaged memory leak in the 1D variant.
        throw new InvalidOperationException(ex.Message, ex);
    }
}
/// <summary>
/// Camera capture loop: grabs HSV frames and either (a) samples a 2D (channel
/// 0, channel 1) model histogram from the user-selected area while it is being
/// set, or (b) back-projects that histogram and tracks the area with CamShift.
/// NOTE(review): runs forever (while(true)) and assigns UI controls directly —
/// presumably executed on a dedicated worker thread; confirm cross-thread
/// control access is safe here.
/// </summary>
private void MainLoop()
{
    CurrentFrame = Cam.QueryFrame().Convert <Hsv, byte>();
    Image <Gray, byte>[] channels;
    // 500x500 scratch images for histogram visualisation; only referenced from
    // the commented-out drawing code below.
    Image <Gray, byte> HistImg1 = new Image <Gray, byte>(500, 500);
    Image <Gray, byte> HistImg2 = new Image <Gray, byte>(500, 500);
    Image <Gray, byte> ProbImage;
    // 10x10-bin 2D histograms over the first two HSV channels.
    DenseHistogram hist1 = new DenseHistogram(new int[] { 10, 10 }, new RangeF[] { new RangeF(0, 255), new RangeF(0, 255) });
    DenseHistogram hist2 = new DenseHistogram(new int[] { 10, 10 }, new RangeF[] { new RangeF(0, 255), new RangeF(0, 255) });
    MCvConnectedComp comp;
    MCvTermCriteria criteria = new MCvTermCriteria(10, 1);
    MCvBox2D box;
    while (true)
    {
        CurrentFrame = Cam.QueryFrame().Convert <Hsv, byte>();
        if (OnSettingArea && TrackArea != Rectangle.Empty)
        {
            // Sampling mode: compute the model histogram from the selected ROI,
            // invert the ROI as visual feedback, then restore the full frame.
            CurrentFrame.ROI = TrackArea;
            channels = CurrentFrame.Split();
            hist1.Calculate(new Image <Gray, byte>[] { channels[0], channels[1] }, false, null);
            CurrentFrame.Not().CopyTo(CurrentFrame);
            CurrentFrame.ROI = Rectangle.Empty;
            CurrentFrame.Draw(TrackArea, new Hsv(100, 100, 100), 2);
            imageBox1.Image = CurrentFrame;
        }
        else
        {
            if (TrackArea != Rectangle.Empty)
            {
                // Tracking mode: back-project the model histogram over the
                // current frame, then run one CamShift step on the result.
                channels = CurrentFrame.Split();
                ProbImage = hist1.BackProject <byte>(new Image <Gray, byte>[] { channels[0], channels[1] });
                imageBox_Hist2.Image = ProbImage.Convert <Gray, byte>();
                // TrackArea is shared with another thread (guarded by LockObject).
                lock (LockObject)
                {
                    if (TrackArea.Height * TrackArea.Width > 0)
                    {
                        CvInvoke.cvCamShift(ProbImage, TrackArea, criteria, out comp, out box);
                        TrackArea = comp.rect;
                        CurrentFrame.Draw(box, new Hsv(100, 100, 100), 2);
                    }
                    /**
                     * ResetContourPoints();
                     * for (int i = 0; i < 60; i++)
                     * {
                     *     ProbImage.Snake(ContourPoints, (float)1.0, (float)0.5, (float)1.5, new Size(17, 17), criteria, true);
                     * }
                     * CurrentFrame.DrawPolyline(ContourPoints, true, new Hsv(100, 100, 100), 2);
                     */
                }
            }
            imageBox1.Image = CurrentFrame;
            //calculate histogram;
            //channels = CurrentFrame.Split();
            //hist2.Calculate(new Image<Gray, byte>[] { channels[0], channels[1] }, false, null);
            //hist2.Normalize(1);
            //HistImg1.SetZero();
            //DrawHist2D(HistImg1, hist1);
            //imageBox_Hist1.Image = HistImg1;
        }
    }
}
/// <summary>
/// Renders a 2D H-S histogram (hue and saturation) into a BGR image using the
/// OpenCV C API through Emgu's CvInvoke wrappers. Each (h, s) bin is drawn as a
/// filled bar colored by the HSV color it represents.
/// </summary>
/// <param name="histDense">The 2D H-S histogram to draw.</param>
/// <returns>The rendered histogram image, ready for display.</returns>
/// <exception cref="InvalidOperationException">Wraps any failure; see InnerException.</exception>
public static Image <Bgr, Byte> Generate2DHistogramImgForDraw(DenseHistogram histDense)
{
    try
    {
        float max_value = 0.0f;
        int[] a1 = new int[100];
        int[] b1 = new int[100];
        float ax = 0;
        int h_bins = histDense.BinDimension[0].Size;
        int s_bins = histDense.BinDimension[1].Size;

        // The DenseHistogram can be passed straight to the cvInvoke OpenCV call.
        CvInvoke.cvGetMinMaxHistValue(histDense, ref ax, ref max_value, a1, b1);

        // Output image for the plot; widen it when there are too many bins to
        // fit the default 800px window.
        int height = 300;
        int width;
        if (h_bins * s_bins > 800)
        {
            width = h_bins * s_bins * 2;
        }
        else
        {
            width = 800;
        }
        IntPtr hist_img = CvInvoke.cvCreateImage(new System.Drawing.Size(width, height), Emgu.CV.CvEnum.IPL_DEPTH.IPL_DEPTH_8U, 3);
        CvInvoke.cvZero(hist_img);

        // Temporary 1x1 images used to convert each bin's HSV color to BGR.
        // NOTE(review): these are never released via cvReleaseImage — unmanaged
        // memory leak on every call; confirm and fix upstream.
        IntPtr hsv_color = CvInvoke.cvCreateImage(new System.Drawing.Size(1, 1), Emgu.CV.CvEnum.IPL_DEPTH.IPL_DEPTH_8U, 3);
        IntPtr rgb_color = CvInvoke.cvCreateImage(new System.Drawing.Size(1, 1), Emgu.CV.CvEnum.IPL_DEPTH.IPL_DEPTH_8U, 3);

        int bin_w = width / (h_bins * s_bins);
        for (int h = 0; h < h_bins; h++)
        {
            for (int s = 0; s < s_bins; s++)
            {
                int i = h * s_bins + s;

                // Bin count scaled to the image height.
                double bin_val = CvInvoke.cvQueryHistValue_2D(histDense, h, s);
                int intensity = (int)System.Math.Round(bin_val * height / max_value);

                // Convert this bin's (H, S, V=255) color to BGR for drawing.
                CvInvoke.cvSet2D(hsv_color, 0, 0, new Emgu.CV.Structure.MCvScalar(h * 180.0f / h_bins, s * 255.0f / s_bins, 255, 0));
                CvInvoke.cvCvtColor(hsv_color, rgb_color, COLOR_CONVERSION.CV_HSV2BGR);
                Emgu.CV.Structure.MCvScalar color = CvInvoke.cvGet2D(rgb_color, 0, 0);

                CvInvoke.cvRectangle(hist_img,
                    new System.Drawing.Point(i * bin_w, height),
                    new System.Drawing.Point((i + 1) * bin_w, height - intensity),
                    color, -1, Emgu.CV.CvEnum.LINE_TYPE.EIGHT_CONNECTED, 0);
            }
        }

        return (EmguFormatConvetor.IplImagePointerToEmgucvImage <Bgr, Byte>(hist_img));
    }
    catch (Exception ex)
    {
        // BUGFIX: preserve the original exception as InnerException instead of
        // discarding its stack trace.
        throw new InvalidOperationException(ex.Message, ex);
    }
}
public abstract bool Execute(DenseHistogram hist, out float minPos);
////////////////////////////////////////////////////////////////////////////////////////////// #endregion #region 直方圖匹配 ////////////////////////////////////////////////////////////////////////////////////////////// /// <summary> /// 匹配值方圖,使用CV_COMP_BHATTACHARYYA /// </summary> /// <param name="templateHist"></param> /// <param name="observedHist"></param> /// <returns>回傳匹配率,CV_COMP_BHATTACHARYYA方法,數值越低比對越精準,反之相似度低,範圍0-1</returns> public static double CompareHist(DenseHistogram templateHist, DenseHistogram observedHist) { return(CvInvoke.cvCompareHist(templateHist, observedHist, HISTOGRAM_COMP_METHOD.CV_COMP_BHATTACHARYYA)); }
/// <summary>
/// Upload handler: saves the posted image, computes its gray-level histogram,
/// counts red blood cells (adaptive threshold + contour pass) and suspected
/// malaria parasites (binary threshold + contour pass), then publishes the
/// annotated image and statistics to the page controls.
/// </summary>
protected void Button1_Click(object sender, EventArgs e)
{
    if (FileUploader.HasFile)
    {
        try
        {
            // Persist the uploaded file and load it as a BGR image.
            FileUploader.SaveAs(Server.MapPath(DefaultFileName) + FileUploader.FileName);
            Image <Bgr, Byte> originalImage = new Image <Bgr, byte>(Server.MapPath(DefaultFileName) + FileUploader.FileName);
            int width, height, channels = 0;
            width = originalImage.Width;
            height = originalImage.Height;
            channels = originalImage.NumberOfChannels;
            Image <Bgr, byte> colorImage = new Image <Bgr, byte>(originalImage.ToBitmap());
            Image <Gray, byte> grayImage = colorImage.Convert <Gray, Byte>();

            // Gray-level histogram (255 bins over [0, 255)).
            float[] GrayHist;
            DenseHistogram Histo = new DenseHistogram(255, new RangeF(0, 255));
            Histo.Calculate(new Image <Gray, Byte>[] { grayImage }, true, null);
            GrayHist = new float[256];
            Histo.MatND.ManagedArray.CopyTo(GrayHist, 0);

            // Find the most frequent intensity (histogram peak) and where it is.
            float largestHist = GrayHist[0];
            int thresholdHist = 0;
            for (int i = 0; i < 255; i++)
            {
                if (GrayHist[i] > largestHist)
                {
                    largestHist = GrayHist[i];
                    thresholdHist = i;
                }
            }

            // Adaptive mean threshold, then contour pass #1: red blood cells.
            grayImage = grayImage.ThresholdAdaptive(new Gray(255), ADAPTIVE_THRESHOLD_TYPE.CV_ADAPTIVE_THRESH_MEAN_C, THRESH.CV_THRESH_BINARY, 85, new Gray(4));
            colorImage = colorImage.Copy();
            int countRedCells = 0;
            using (MemStorage storage = new MemStorage())
            {
                for (Contour <Point> contours = grayImage.FindContours(Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_LIST, storage); contours != null; contours = contours.HNext)
                {
                    Contour <Point> currentContour = contours.ApproxPoly(contours.Perimeter * 0.015, storage);
                    // Ignore small artifacts (bounding box <= 20 px wide).
                    if (currentContour.BoundingRectangle.Width > 20)
                    {
                        CvInvoke.cvDrawContours(colorImage, contours, new MCvScalar(0, 0, 255), new MCvScalar(0, 0, 255), -1, 2, Emgu.CV.CvEnum.LINE_TYPE.EIGHT_CONNECTED, new Point(0, 0));
                        colorImage.Draw(currentContour.BoundingRectangle, new Bgr(0, 255, 0), 1);
                        countRedCells++;
                    }
                }
            }

            // Fixed binary threshold, then contour pass #2: malaria candidates.
            Image <Gray, byte> grayImageCopy2 = originalImage.Convert <Gray, Byte>();
            grayImageCopy2 = grayImageCopy2.ThresholdBinary(new Gray(100), new Gray(255));
            colorImage = colorImage.Copy();
            int countMalaria = 0;
            using (MemStorage storage = new MemStorage())
            {
                for (Contour <Point> contours = grayImageCopy2.FindContours(Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_TREE, storage); contours != null; contours = contours.HNext)
                {
                    Contour <Point> currentContour = contours.ApproxPoly(contours.Perimeter * 0.015, storage);
                    if (currentContour.BoundingRectangle.Width > 20)
                    {
                        CvInvoke.cvDrawContours(colorImage, contours, new MCvScalar(255, 0, 0), new MCvScalar(255, 0, 0), -1, 2, Emgu.CV.CvEnum.LINE_TYPE.EIGHT_CONNECTED, new Point(0, 0));
                        colorImage.Draw(currentContour.BoundingRectangle, new Bgr(0, 255, 0), 1);
                        countMalaria++;
                    }
                }
            }

            // Publish the annotated result and the statistics to the page.
            colorImage.Save(Server.MapPath(DefaultFileName2) + FileUploader.FileName);
            inputDiv.Attributes["style"] = "display: block; margin-left: auto; margin-right: auto";
            outputDiv.Attributes["style"] = "display: block; margin-left: auto; margin-right: auto";
            Image1.ImageUrl = this.ResolveUrl(DefaultFileName + FileUploader.FileName);
            Image2.ImageUrl = this.ResolveUrl(DefaultFileName2 + FileUploader.FileName);
            Chart1.DataBindTable(GrayHist);
            Label1.Text = "Uploaded Successfully";
            Label2.Text = "File name: " + FileUploader.PostedFile.FileName + "<br>" + "File Size: " + FileUploader.PostedFile.ContentLength + " kb<br>" + "Content type: " + FileUploader.PostedFile.ContentType + "<br>" + "Resolution: " + width.ToString() + "x" + height.ToString() + "<br>" + "Number of channels: " + channels.ToString() + "<br>" + "Histogram (maximum value): " + largestHist + " @ " + thresholdHist;
            LabelRed.Text = countRedCells.ToString();
            LabelMalaria.Text = countMalaria.ToString();
        }
        catch (Exception ex)
        {
            Label1.Text = "ERROR: " + ex.Message.ToString();
            Label2.Text = "";
        }
    }
    else
    {
        Label1.Text = "You have not specified a file.";
        Label2.Text = "";
    }
}
public RotatedRect Tracking(Image <Bgr, Byte> image) { GetFrameHue(image); // User changed bins num ,recalculate Hist if (Main._advancedHsv) { if (bins != Main.HsvSetting.Getbins) { bins = Main.HsvSetting.Getbins; hist.Dispose(); hist = new DenseHistogram(bins, new RangeF(0, 180)); CalcHist(image); } } backprojection = hist.BackProject(new Image <Gray, Byte>[] { hue }); // Add mask backprojection._And(mask); // FindContours //CvInvoke.Canny(backprojection, backcopy, 3, 6); backprojection.CopyTo(backcopy); CvInvoke.FindContours(backcopy, vvp, null, Emgu.CV.CvEnum.RetrType.External, Emgu.CV.CvEnum.ChainApproxMethod.ChainApproxNone); int trackArea = trackingWindow.Height * trackingWindow.Width; FindTargetByArea(vvp, trackArea * 0.25, trackArea * 10, ref vvpApprox); vvpApproxDensity = GetVVPDensity(vvpApprox, out vvpApproxRect); targetVVPIndex = FindTargetByOverlap(vvpApprox, trackingWindow); //FindTargetByCenter(vvpApprox, new PointF(trackingWindow.X + trackingWindow.Width / 2, trackingWindow.Y + trackingWindow.Height / 2)); // If lost trackbox if (trackingWindow.IsEmpty || trackingWindow.Width <= 10 || trackingWindow.Height <= 10 || _lost || targetVVPIndex == -1) { if (!timer.IsRunning) { timer.Start(); } if (timer.ElapsedMilliseconds > 1000) { _lost = true; } if (timer.ElapsedMilliseconds > 3000) { //targetVVPIndex = Array.IndexOf(vvpApproxDensity, vvpApproxDensity.Max()); } for (int i = 0; i < vvpApproxDensity.Length; i++) { if (vvpApproxDensity[i] >= targetDensity * 0.8) { trackingWindow = vvpApproxRect[i]; _lost = false; timer.Reset(); } } } else { trackbox = CvInvoke.CamShift(backprojection, ref trackingWindow, new MCvTermCriteria(10, 1)); targetDensity += vvpApproxDensity[targetVVPIndex]; targetDensity /= 2; if (timer.IsRunning) { timer.Reset(); } } return(trackbox); }
/// <summary>
/// Camera capture loop: grabs HSV frames and either (a) samples a 2D (channel
/// 0, channel 1) model histogram from the user-selected area while it is being
/// set, or (b) back-projects that histogram and tracks the area with CamShift.
/// NOTE(review): runs forever (while(true)) and assigns UI controls directly —
/// presumably executed on a dedicated worker thread; confirm cross-thread
/// control access is safe here.
/// </summary>
private void MainLoop()
{
    CurrentFrame = Cam.QueryFrame().Convert<Hsv, byte>();
    Image<Gray, byte>[] channels;
    // 500x500 scratch images for histogram visualisation; only referenced from
    // the commented-out drawing code below.
    Image<Gray, byte> HistImg1 = new Image<Gray, byte>(500, 500);
    Image<Gray, byte> HistImg2 = new Image<Gray, byte>(500, 500);
    Image<Gray, byte> ProbImage;
    // 10x10-bin 2D histograms over the first two HSV channels.
    DenseHistogram hist1 = new DenseHistogram(new int[] { 10, 10 }, new RangeF[] { new RangeF(0, 255), new RangeF(0, 255) });
    DenseHistogram hist2 = new DenseHistogram(new int[] { 10, 10 }, new RangeF[] { new RangeF(0, 255), new RangeF(0, 255) });
    MCvConnectedComp comp;
    MCvTermCriteria criteria = new MCvTermCriteria(10, 1);
    MCvBox2D box;
    while (true)
    {
        CurrentFrame = Cam.QueryFrame().Convert<Hsv, byte>();
        if (OnSettingArea && TrackArea != Rectangle.Empty)
        {
            // Sampling mode: compute the model histogram from the selected ROI,
            // invert the ROI as visual feedback, then restore the full frame.
            CurrentFrame.ROI = TrackArea;
            channels = CurrentFrame.Split();
            hist1.Calculate(new Image<Gray, byte>[] { channels[0], channels[1] }, false, null);
            CurrentFrame.Not().CopyTo(CurrentFrame);
            CurrentFrame.ROI = Rectangle.Empty;
            CurrentFrame.Draw(TrackArea, new Hsv(100, 100, 100), 2);
            imageBox1.Image = CurrentFrame;
        }
        else
        {
            if (TrackArea != Rectangle.Empty)
            {
                // Tracking mode: back-project the model histogram over the
                // current frame, then run one CamShift step on the result.
                channels = CurrentFrame.Split();
                ProbImage = hist1.BackProject<byte>(new Image<Gray, byte>[] { channels[0], channels[1] });
                imageBox_Hist2.Image = ProbImage.Convert<Gray, byte>();
                // TrackArea is shared with another thread (guarded by LockObject).
                lock (LockObject)
                {
                    if (TrackArea.Height * TrackArea.Width > 0)
                    {
                        CvInvoke.cvCamShift(ProbImage, TrackArea, criteria, out comp, out box);
                        TrackArea = comp.rect;
                        CurrentFrame.Draw(box, new Hsv(100, 100, 100), 2);
                    }
                    /**
                    ResetContourPoints();
                    for (int i = 0; i < 60; i++)
                    {
                        ProbImage.Snake(ContourPoints, (float)1.0, (float)0.5, (float)1.5, new Size(17, 17), criteria, true);
                    }
                    CurrentFrame.DrawPolyline(ContourPoints, true, new Hsv(100, 100, 100), 2);
                    */
                }
            }
            imageBox1.Image = CurrentFrame;
            //calculate histogram;
            //channels = CurrentFrame.Split();
            //hist2.Calculate(new Image<Gray, byte>[] { channels[0], channels[1] }, false, null);
            //hist2.Normalize(1);
            //HistImg1.SetZero();
            //DrawHist2D(HistImg1, hist1);
            //imageBox_Hist1.Image = HistImg1;
        }
    }
}
////////////////////////////////////////////////////////////////////////////////////////////// #endregion #region 匹配值方圖 ////////////////////////////////////////////////////////////////////////////////////////////// /// <summary> /// 直方圖匹配(使用BHATTACHARYYA) /// </summary> /// <param name="template">樣板值方圖</param> /// <param name="observedSrcImg">要比對的圖像</param> /// <param name="observedHist">觀察影像的直方圖</param> /// <returns>匹配率越低表示匹配度越高</returns> public static double CompareHistogram(DenseHistogram template, Image <Bgr, Byte> observedSrcImg, out DenseHistogram observedHist) { //計算影像的值方圖 if (template.Dimension == 1) { observedHist = HistogramOperation.CalHsvHistogram(observedSrcImg.Ptr, template.Dimension, template.BinDimension[0].Size); } else if (template.Dimension == 2) { observedHist = HistogramOperation.CalHsvHistogram(observedSrcImg.Ptr, template.Dimension, template.BinDimension[0].Size, template.BinDimension[1].Size); } else { observedHist = HistogramOperation.CalHsvHistogram(observedSrcImg.Ptr, template.Dimension, template.BinDimension[0].Size, template.BinDimension[1].Size, template.BinDimension[2].Size); } //匹配後回傳匹配率 return(HistogramOperation.CompareHist(template, observedHist)); }
/// <summary>
/// Tracks the object described by <paramref name="hist_roi"/> in the next
/// camera frame: builds a hue back-projection, binarizes it, runs CamShift on
/// the tracker window, and draws the resulting ellipse on the frame.
/// </summary>
/// <param name="hist_roi">Hue histogram of the region of interest being tracked.</param>
public void tracking(Mat hist_roi)
{
    using (var nextframe = cap.QueryFrame().ToImage <Bgr, Byte>())
    {
        if (nextframe != null)
        {
            // CLEANUP: removed several locals that were assigned but never used
            // (an unused DenseHistogram, range/histsize/channels arrays, a
            // trackwindow rectangle and a Size) — no behavior change.
            int[] Chn = { 0 };

            // Convert to HSV and build the in-range mask.
            Mat hsv = new Mat();
            CvInvoke.CvtColor(nextframe, hsv, ColorConversion.Bgr2Hsv);
            Mat mask = new Mat();
            CvInvoke.InRange(hsv, new ScalarArray(new MCvScalar(0, 60, 32)), new ScalarArray(new MCvScalar(180, 256, 255)), mask);

            // Extract the hue channel from the HSV image.
            Mat hue = new Mat();
            hue.Create(hsv.Rows, hsv.Cols, hsv.Depth, 0);
            int[] chn = { 0, 0 };
            var vhue = new VectorOfMat(hue);
            var vhsv = new VectorOfMat(hsv);
            CvInvoke.MixChannels(vhsv, vhue, chn);

            // Back-project the ROI histogram and binarize the probability map.
            Mat dst = new Mat();
            float[] Range = { 0, 180, 0, 255 };
            CvInvoke.CalcBackProject(vhue, Chn, hist_roi, dst, Range, 1);
            CvInvoke.Threshold(dst, dst, 50, 255, 0);
            imageBox1.Image = dst;

            // One CamShift step; trackerbox is updated in place via ref.
            MCvTermCriteria termCrit = new MCvTermCriteria(10, 0.1);
            CvInvoke.CamShift(dst, ref trackerbox, termCrit);

            // Face detection inside the tracked window. NOTE(review): the
            // result is only used by the commented-out single-face check that
            // originally guarded the ellipse drawing — confirm it is still needed.
            var grayframe = nextframe.Convert <Gray, byte>();
            grayframe.ROI = trackerbox;
            var grayface = grayframe.Copy().Mat;
            var faces = haar.DetectMultiScale(grayface, 1.1, 10, Size.Empty);
            int totalface = faces.Length;

            // Overlay the back-projection on the frame and mark the track box.
            RectangleF ret = trackerbox;
            CvInvoke.CvtColor(dst, nextframe, ColorConversion.Gray2Bgr);
            MCvScalar color = new MCvScalar(0, 0, 255);
            CvInvoke.Ellipse(nextframe, ret, color, 3, LineType.AntiAlias);
        }
        imageBox1.Image = nextframe;
    }
}
public override bool Execute(DenseHistogram hist, out float minPos) { minPos = Param; return(true); }
private float[] GetHistogramData(Image<Gray, byte> imgGray) { float[] histoGrammData; DenseHistogram histoGramm = new DenseHistogram(255, new RangeF(0, 255)); histoGramm.Calculate(new Image<Gray, Byte>[] { imgGray }, true, null); //The data is here //Histo.MatND.ManagedArray histoGrammData = new float[256]; histoGramm.MatND.ManagedArray.CopyTo(histoGrammData, 0); return histoGrammData; }
public mHistogram(Image <Bgr, Byte> img) { Obraz = img; #region oblicz wartosci histogramów var Histo = new DenseHistogram(255, new RangeF(0, 255)); Image <Gray, Byte> img2Blue = img[0]; Image <Gray, Byte> img2Green = img[1]; Image <Gray, Byte> img2Red = img[2]; Histo.Calculate(new[] { img2Blue }, true, null); BlueHist = new float[256]; Histo.MatND.ManagedArray.CopyTo(BlueHist, 0); Histo.Clear(); Histo.Calculate(new[] { img2Green }, true, null); GreenHist = new float[256]; Histo.MatND.ManagedArray.CopyTo(GreenHist, 0); Histo.Clear(); Histo.Calculate(new[] { img2Red }, true, null); RedHist = new float[256]; Histo.MatND.ManagedArray.CopyTo(RedHist, 0); #endregion #region 'kwantyzacja histogramów'' int wielkoscPrzedialu = 4; int aktualnaPozycjaR = 0; int aktualnaPozycjaG = 0; int aktualnaPozycjaB = 0; float wartoscR = 0; float wartoscG = 0; float wartoscB = 0; RedHistQ = new float[RedHist.Length / wielkoscPrzedialu]; GreenHistQ = new float[GreenHist.Length / wielkoscPrzedialu]; BlueHistQ = new float[BlueHist.Length / wielkoscPrzedialu]; for (int i = 0; i < RedHist.Length / wielkoscPrzedialu; i++) { for (int j = 0; j < wielkoscPrzedialu; j++) { wartoscR += RedHist[aktualnaPozycjaR + j]; } RedHistQ[i] = wartoscR; wartoscR = 0; aktualnaPozycjaR++; } for (int i = 0; i < GreenHist.Length / wielkoscPrzedialu; i++) { for (int j = 0; j < wielkoscPrzedialu; j++) { wartoscG += GreenHist[aktualnaPozycjaG + j]; } GreenHistQ[i] = wartoscG; wartoscG = 0; aktualnaPozycjaG++; } for (int i = 0; i < BlueHist.Length / wielkoscPrzedialu; i++) { for (int j = 0; j < wielkoscPrzedialu; j++) { wartoscB += BlueHist[aktualnaPozycjaB + j]; } BlueHistQ[i] = wartoscB; wartoscB = 0; aktualnaPozycjaB++; } #endregion }
/// <summary>
/// Eliminate the matched features whose scale and rotation do not agree with the majority's scale and rotation.
/// </summary>
/// <param name="rotationBins">The numbers of bins for rotation, a good value might be 20 (which means each bin covers 18 degree)</param>
/// <param name="scaleIncrement">This determins the different in scale for neighbour hood bins, a good value might be 1.5 (which means matched features in bin i+1 is scaled 1.5 times larger than matched features in bin i</param>
/// <param name="matchedFeatures">The matched feature that will be participated in the voting. For each matchedFeatures, only the zero indexed ModelFeature will be considered.</param>
/// <returns>The subset of matchedFeatures falling into the dominant scale/rotation bins.</returns>
public static MatchedSURFFeature[] VoteForSizeAndOrientation(MatchedSURFFeature[] matchedFeatures, double scaleIncrement, int rotationBins)
{
    int elementsCount = matchedFeatures.Length;

    // BUGFIX: with no matches, minScale/maxScale stay at float.MaxValue/MinValue
    // and the histogram range below would be invalid — return early instead.
    if (elementsCount == 0)
        return new MatchedSURFFeature[0];

    float[] scales = new float[elementsCount];
    float[] rotations = new float[elementsCount];
    float[] flags = new float[elementsCount];
    float minScale = float.MaxValue;
    float maxScale = float.MinValue;

    // Collect log10(scale ratio) and the rotation difference normalized to [0, 360).
    for (int i = 0; i < matchedFeatures.Length; i++)
    {
        float scale = (float)matchedFeatures[i].ObservedFeature.Point.size / (float)matchedFeatures[i].SimilarFeatures[0].Feature.Point.size;
        scale = (float)Math.Log10(scale);
        scales[i] = scale;
        if (scale < minScale) minScale = scale;
        if (scale > maxScale) maxScale = scale;

        float rotation = matchedFeatures[i].ObservedFeature.Point.dir - matchedFeatures[i].SimilarFeatures[0].Feature.Point.dir;
        rotations[i] = rotation < 0.0 ? rotation + 360 : rotation;
    }

    int scaleBinSize = (int)Math.Max(((maxScale - minScale) / Math.Log10(scaleIncrement)), 1);
    int count;

    using (DenseHistogram h = new DenseHistogram(new int[] { scaleBinSize, rotationBins }, new RangeF[] { new RangeF(minScale, maxScale), new RangeF(0, 360) }))
    {
        // Pin the managed arrays so they can be viewed as CV matrices.
        GCHandle scaleHandle = GCHandle.Alloc(scales, GCHandleType.Pinned);
        GCHandle rotationHandle = GCHandle.Alloc(rotations, GCHandleType.Pinned);
        GCHandle flagsHandle = GCHandle.Alloc(flags, GCHandleType.Pinned);
        try
        {
            using (Matrix<float> flagsMat = new Matrix<float>(1, elementsCount, flagsHandle.AddrOfPinnedObject()))
            using (Matrix<float> scalesMat = new Matrix<float>(1, elementsCount, scaleHandle.AddrOfPinnedObject()))
            using (Matrix<float> rotationsMat = new Matrix<float>(1, elementsCount, rotationHandle.AddrOfPinnedObject()))
            {
                // Vote into the 2D (scale, rotation) histogram, keep only bins
                // with at least half of the peak votes, then back-project so
                // each feature in a surviving bin gets a non-zero flag.
                h.Calculate(new Matrix<float>[] { scalesMat, rotationsMat }, true, null);

                float minVal, maxVal;
                int[] minLoc, maxLoc;
                h.MinMax(out minVal, out maxVal, out minLoc, out maxLoc);

                h.Threshold(maxVal * 0.5);

                CvInvoke.cvCalcBackProject(new IntPtr[] { scalesMat.Ptr, rotationsMat.Ptr }, flagsMat.Ptr, h.Ptr);
                count = CvInvoke.cvCountNonZero(flagsMat);
            }
        }
        finally
        {
            // BUGFIX: free the pinned handles even when an exception is thrown
            // above (previously they leaked on any failure).
            scaleHandle.Free();
            rotationHandle.Free();
            flagsHandle.Free();
        }

        // Gather the features whose bins survived the threshold.
        MatchedSURFFeature[] matchedGoodFeatures = new MatchedSURFFeature[count];
        int index = 0;
        for (int i = 0; i < matchedFeatures.Length; i++)
            if (flags[i] != 0)
                matchedGoodFeatures[index++] = matchedFeatures[i];
        return matchedGoodFeatures;
    }
}
public void TestHistogram() { using (Image<Bgr, Byte> img = new Image<Bgr, byte>("stuff.jpg")) using (Image<Hsv, Byte> img2 = img.Convert<Hsv, Byte>()) { Image<Gray, Byte>[] HSVs = img2.Split(); using (DenseHistogram h = new DenseHistogram(20, new RangeF(0, 180))) { h.Calculate(new Image<Gray, Byte>[1] { HSVs[0] }, true, null); using (Image<Gray, Byte> bpj = h.BackProject(new Image<Gray, Byte>[1] { HSVs[0] })) { Size sz = bpj.Size; } using (Image<Gray, Single> patchBpj = h.BackProjectPatch( new Image<Gray, Byte>[1] { HSVs[0] }, new Size(5, 5), Emgu.CV.CvEnum.HISTOGRAM_COMP_METHOD.CV_COMP_CHISQR, 1.0)) { Size sz = patchBpj.Size; } } foreach (Image<Gray, Byte> i in HSVs) i.Dispose(); } }
/// <summary>
/// Eliminate the matched features whose scale and rotation do not agree with the majority's scale and rotation.
/// </summary>
/// <param name="rotationBins">The numbers of bins for rotation, a good value might be 20 (which means each bin covers 18 degree)</param>
/// <param name="scaleIncrement">This determins the different in scale for neighbour hood bins, a good value might be 1.5 (which means matched features in bin i+1 is scaled 1.5 times larger than matched features in bin i</param>
/// <param name="matchedFeatures">The matched feature that will be participated in the voting. For each matchedFeatures, only the zero indexed ModelFeature will be considered.</param>
/// <returns>The subset of matchedFeatures falling into the dominant scale/rotation bins.</returns>
public static MatchedSURFFeature[] VoteForSizeAndOrientation(MatchedSURFFeature[] matchedFeatures, double scaleIncrement, int rotationBins)
{
    int elementsCount = matchedFeatures.Length;

    // BUGFIX: with no matches, minScale/maxScale stay at float.MaxValue/MinValue
    // and the histogram range below would be invalid — return early instead.
    if (elementsCount == 0)
    {
        return (new MatchedSURFFeature[0]);
    }

    float[] scales = new float[elementsCount];
    float[] rotations = new float[elementsCount];
    float[] flags = new float[elementsCount];
    float minScale = float.MaxValue;
    float maxScale = float.MinValue;

    // Collect log10(scale ratio) and the rotation difference normalized to [0, 360).
    for (int i = 0; i < matchedFeatures.Length; i++)
    {
        float scale = (float)matchedFeatures[i].ObservedFeature.Point.size / (float)matchedFeatures[i].SimilarFeatures[0].Feature.Point.size;
        scale = (float)Math.Log10(scale);
        scales[i] = scale;
        if (scale < minScale)
        {
            minScale = scale;
        }
        if (scale > maxScale)
        {
            maxScale = scale;
        }

        float rotation = matchedFeatures[i].ObservedFeature.Point.dir - matchedFeatures[i].SimilarFeatures[0].Feature.Point.dir;
        rotations[i] = rotation < 0.0 ? rotation + 360 : rotation;
    }

    int scaleBinSize = (int)Math.Max(((maxScale - minScale) / Math.Log10(scaleIncrement)), 1);
    int count;

    using (DenseHistogram h = new DenseHistogram(new int[] { scaleBinSize, rotationBins }, new RangeF[] { new RangeF(minScale, maxScale), new RangeF(0, 360) }))
    {
        // Pin the managed arrays so they can be viewed as CV matrices.
        GCHandle scaleHandle = GCHandle.Alloc(scales, GCHandleType.Pinned);
        GCHandle rotationHandle = GCHandle.Alloc(rotations, GCHandleType.Pinned);
        GCHandle flagsHandle = GCHandle.Alloc(flags, GCHandleType.Pinned);
        try
        {
            using (Matrix <float> flagsMat = new Matrix <float>(1, elementsCount, flagsHandle.AddrOfPinnedObject()))
            using (Matrix <float> scalesMat = new Matrix <float>(1, elementsCount, scaleHandle.AddrOfPinnedObject()))
            using (Matrix <float> rotationsMat = new Matrix <float>(1, elementsCount, rotationHandle.AddrOfPinnedObject()))
            {
                // Vote into the 2D (scale, rotation) histogram, keep only bins
                // with at least half of the peak votes, then back-project so
                // each feature in a surviving bin gets a non-zero flag.
                h.Calculate(new Matrix <float>[] { scalesMat, rotationsMat }, true, null);

                float minVal, maxVal;
                int[] minLoc, maxLoc;
                h.MinMax(out minVal, out maxVal, out minLoc, out maxLoc);

                h.Threshold(maxVal * 0.5);

                CvInvoke.cvCalcBackProject(new IntPtr[] { scalesMat.Ptr, rotationsMat.Ptr }, flagsMat.Ptr, h.Ptr);
                count = CvInvoke.cvCountNonZero(flagsMat);
            }
        }
        finally
        {
            // BUGFIX: free the pinned handles even when an exception is thrown
            // above (previously they leaked on any failure).
            scaleHandle.Free();
            rotationHandle.Free();
            flagsHandle.Free();
        }

        // Gather the features whose bins survived the threshold.
        MatchedSURFFeature[] matchedGoodFeatures = new MatchedSURFFeature[count];
        int index = 0;
        for (int i = 0; i < matchedFeatures.Length; i++)
        {
            if (flags[i] != 0)
            {
                matchedGoodFeatures[index++] = matchedFeatures[i];
            }
        }
        return (matchedGoodFeatures);
    }
}
/// <summary>
/// Background subtraction: compares currImage against bgImage block-wise, labels
/// connected moving regions, and thresholds each region into a binary foreground
/// mask. Also scores each blob's hue histogram against the global target histogram.
/// </summary>
/// <param name="currImage">Current video frame.</param>
/// <param name="bgImage">Background reference frame.</param>
/// <param name="finalBlobImg">Out: binary image — 255 for foreground, 0 for background.</param>
/// <param name="blobRect">Out: ROI rectangle of each detected blob in this frame.</param>
/// <returns>ArrayList of Blob entries (rect + Bhattacharyya distance to Main.hist).</returns>
public ArrayList bgSubtraction(Image <Bgr, Byte> currImage, Image <Bgr, Byte> bgImage, ref Image <Gray, byte> finalBlobImg, ref ArrayList blobRect)
{
    //### inputs are currImage and bgImage. Output is finalBlobImg - 1 for FG and 0 for BG and blobRect to store ROIs of each blob in this frame
    //### extracting pixels from currImage and bgImage
    // NOTE(review): this initial allocation is immediately overwritten by bitmap22D() below.
    byte[, ,] curImgPix = new byte[Main.w, Main.h, 1];
    // Block-downsampled motion map: one cell per b x b pixel block.
    byte[, ,] bw_2d = new byte[Main.h / Main.b, Main.w / Main.b, 1];
    ArrayList blobDistanceList = new ArrayList(1);
    curImgPix = bitmap22D(currImage);
    currImgGy = currImage.Convert <Gray, byte>();
    bgImgGy = bgImage.Convert <Gray, byte>();

    //### Motion Detection between currImage pixels and bgImage pixels
    motionDetection(curImgPix, bitmap22D(bgImgGy), Main.w, Main.h, Main.b, ref bw_2d);

    //Component Labeling for localizing objects that are moving. There can be more than one objects that are moving in the scene.
    byte m = 1;
    // mv: list of blobs; each blob is an ArrayList of (y, x) block coordinates.
    ArrayList mv = new ArrayList();
    for (int x = 0; x < h / b; x++)
    {
        for (int y = 0; y < w / b; y++)
        {
            if (bw_2d[x, y, 0] == 1)
            {
                // Label the connected component rooted at this block with ++m
                // (compLabel collects the member blocks into 'label').
                ArrayList label = new ArrayList();
                compLabel(x, y, ++m, label, ref bw_2d);
                mv.Add(label);
            }
        }
    }
    //Console.WriteLine("-----No of Blobs=" + mv.Count);

    //### outimage - binay image of each blob; finalBlobImg - OR of all blob images to get final binay image with 1 for FG and 0 for BG
    finalBlobImg = new Image <Gray, byte>(w, h);
    Image <Gray, byte> outimage = new Image <Gray, byte>(w, h);

    //### blobRect - to store the ROIs of each blob in the currImage.
    blobRect = new ArrayList();

    //Calculate xmin ymin xmax ymax for each blob detected
    foreach (ArrayList blob in mv)
    {
        if (blob.Count > 25) //no of blocks in each blob > 3
        {
            //Console.WriteLine(blob.Count+"\n");
            int xmin, xmax, ymin, ymax;
            // Seed the bounding box with the blob's first block; each point is
            // stored as (y, x) inside its own ArrayList.
            IEnumerator iblob = blob.GetEnumerator();
            iblob.MoveNext();
            ArrayList pt = (ArrayList)iblob.Current;
            IEnumerator ipt = pt.GetEnumerator();
            ipt.MoveNext();
            //Console.Write("{y"+ipt.Current);
            ymin = ymax = (int)ipt.Current;
            ipt.MoveNext();
            xmin = xmax = (int)ipt.Current;
            //Console.Write(" x"+ipt.Current+"}\n");
            // Expand the box over the remaining blocks.
            while (iblob.MoveNext())
            {
                ArrayList ptt = (ArrayList)iblob.Current;
                IEnumerator iptt = ptt.GetEnumerator();
                iptt.MoveNext();
                int y = (int)iptt.Current;
                iptt.MoveNext();
                int x = (int)iptt.Current;
                // Console.Write(x+","+y+" ; ");
                if (xmin > x) { xmin = x; }
                if (xmax < x) { xmax = x; }
                if (ymin > y) { ymin = y; }
                if (ymax < y) { ymax = y; }
            }
            //Console.WriteLine("****" + xmin + " " + xmax + " " + ymin + " " + ymax+" "+blob.Count);
            // g.drawRect((xmin*blk),(ymin*blk),((xmax-xmin)*blk)+blk,((ymax-ymin)*blk)+blk);
            // Scale block coordinates back to pixel coordinates.
            subrect = new Rectangle((xmin * b), (ymin * b), ((xmax - xmin) * b) + b, ((ymax - ymin) * b) + b);
            blobRect.Add(subrect);
            currImage.Draw(subrect, new Bgr(0, 0, 0), 1);
            Image <Gray, byte> subImgBg = bgImgGy.GetSubRect(subrect);
            Image <Gray, byte> subImgFg = currImgGy.GetSubRect(subrect);
            //subImgBg.Save("bg.jpg");
            //subImgFg.Save("fg"+k+++".jpg");
            // Per-pixel absolute difference between foreground and background inside the ROI.
            Image <Gray, byte> imMask = subImgFg.AbsDiff(subImgBg);
            //Console.WriteLine(Thread.CurrentThread.Name + " " + subrect);
            for (int i = 0; i < subrect.Height; i++)
            {
                for (int j = 0; j < subrect.Width; j++)
                {
                    //subImgFg.Data[i, j, 0] = 255;
                    // Binarize with threshold Main.ThSub, mirroring the result
                    // into the full-frame outimage at the ROI's offset.
                    if (imMask.Data[i, j, 0] < Main.ThSub)
                    {
                        imMask.Data[i, j, 0] = 0;
                        outimage.Data[i + subrect.Y, j + subrect.X, 0] = 0;
                    }
                    else
                    {
                        imMask.Data[i, j, 0] = 255;
                        outimage.Data[i + subrect.Y, j + subrect.X, 0] = 255;
                    }
                }
                //Console.WriteLine();
            }
            //imMask._Erode(1);
            //imMask._Dilate(2);
            // Clean speckle noise from the combined mask.
            outimage._Erode(2);
            outimage._Dilate(3);
            try
            {
                Image <Bgr, byte> subimg = currImage.And((outimage.Convert <Bgr, Byte>())).GetSubRect(subrect);
                //subimg.GetSubRect(subrect);//.Save(Thread.CurrentThread.Name + "\\" + k++ + ".jpg");
                //Calc HISTOGRAM of each blob
                DenseHistogram histBlob = new DenseHistogram(hdims, hranges); //cvCreateHist(1, &hdims, CV_HIST_ARRAY, &hranges, 1);
                Image <Hsv, byte> hsvBlob = subimg.Convert <Hsv, byte>();
                //extract the hue and value channels
                Image <Gray, Byte>[] channelsBlob = hsvBlob.Split(); //split into components
                Image <Gray, Byte>[] imghueBlob = new Image <Gray, byte> [1];
                imghueBlob[0] = channelsBlob[0]; //hsv, so channels[0] is hue.
                // Mask out pixels outside the configured saturation/value limits.
                Hsv hsv_lower = new Hsv(0, smin, Math.Min(vmin, vmax));
                Hsv hsv_upper = new Hsv(180, 256, Math.Max(vmin, vmax));
                Image <Gray, Byte> maskBlob = hsvBlob.InRange(hsv_lower, hsv_upper);
                histBlob.Calculate(imghueBlob, false, maskBlob);
                // Bhattacharyya distance between this blob's hue histogram and the
                // global target histogram (smaller = more similar).
                double distance = CvInvoke.cvCompareHist(Main.hist.Ptr, histBlob.Ptr, Emgu.CV.CvEnum.HISTOGRAM_COMP_METHOD.CV_COMP_BHATTACHARYYA);
                //Console.WriteLine(Thread.CurrentThread.Name + " = " + distance);
                //Add rect and distance of each blob into blobDistanceList
                Blob currenBlob;
                currenBlob.rect = subrect;
                currenBlob.distance = distance;
                blobDistanceList.Add(currenBlob);
            }
            catch (CvException cve)
            {
                // NOTE(review): CvExceptions from the histogram step are deliberately
                // swallowed; the blob is simply skipped.
                // MessageBox.Show(cve.StackTrace);
            }
            finalBlobImg = finalBlobImg.Or(outimage);
        }
    }
    return(blobDistanceList);
}
private void CalibrateHSV(ref Image<Hsv, Byte> hsvImage, ref DenseHistogram histogram) { float horizontalFactor = 0.2f; float verticalFactor = 0.2f; int rectWidth = (int)(hsvImage.Width * horizontalFactor); int rectHeight = (int)(hsvImage.Height * verticalFactor); int topLeftX = (int)((((float)hsvImage.Width / 2) - rectWidth) / 2); int topLeftY = (int)(((float)hsvImage.Height - rectHeight) / 2); Rectangle rangeOfInterest = new Rectangle(topLeftX, topLeftY, rectWidth, rectHeight); Image<Gray, Byte> maskedImage = hsvImage.InRange( new Hsv(hue_min, saturation_min, value_min), new Hsv(hue_max, saturation_max, value_max)); Image<Hsv, byte> partToCompute = hsvImage.Copy(rangeOfInterest); int[] h_bins = { 30, 30 }; RangeF[] h_ranges = { new RangeF(0, 180), new RangeF(0, 255) }; Image<Gray, byte>[] channels = partToCompute.Split().Take(2).ToArray(); histogram = new DenseHistogram(h_bins, h_ranges); histogram.Calculate(channels, true, null); float minValue, maxValue; int[] posMinValue, posMaxValue; histogram.MinMax(out minValue, out maxValue, out posMinValue, out posMaxValue); histogram.Threshold( (double)minValue + (maxValue - minValue) * 40 / 100 ); hsvImage = maskedImage.Convert<Hsv, Byte>() //tu powstaje jakiś "First chance of exception..." .SmoothGaussian(5) .Dilate(1) .Convert<Rgb, Byte>() .ThresholdBinary(new Rgb(127,127,127), new Rgb(255,255,255)) .Convert<Hsv, Byte>(); //hsvImage.Draw(rangeOfInterest, new Hsv(255, 255, 255), 3); }
/// <summary>
/// Kinect frame handler: polls the color and depth streams, remaps depth into
/// color space, runs CAMShift tracking on the hue back-projection when tracking
/// is active, and publishes the tracked object's average distance.
/// </summary>
void dataStream(object sender, EventArgs e)
{
    {
        // NOTE(review): 'range' is built but never used in this handler.
        RangeF[] range = new RangeF[2];
        range[0] = new RangeF(0, 180);
        range[1] = new RangeF(0, 255);

        pollColorImageStream();
        pollDepthImageStream();

        //Color------------------
        // Wrap the raw Kinect color buffer in a 32bpp bitmap, then in an EmguCV image.
        Bitmap bitmapColor = new Bitmap(colorImage.Width, colorImage.Height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
        BitmapData bmd = bitmapColor.LockBits(new System.Drawing.Rectangle(0, 0, colorImage.Width, colorImage.Height), ImageLockMode.ReadWrite, bitmapColor.PixelFormat);
        Marshal.Copy(colorPixelData, 0, bmd.Scan0, colorPixelData.Length);
        bitmapColor.UnlockBits(bmd);
        Image<Bgr, Byte> colorTemp = new Image<Bgr, Byte>(bitmapColor);
        //Color------------------end

        //depth------------------
        byte[] byteDepth = new byte[640 * 480];
        byte[] remap = new byte[640 * 480];
        sensor.MapDepthFrameToColorFrame(DepthImageFormat.Resolution640x480Fps30, depthPixelData, ColorImageFormat.RgbResolution640x480Fps30, colorCoordinate);
        for (int y = 0; y < 480; y++)
        {
            for (int x = 0; x < 640; x++)
            {
                int position = y * 640 + x;
                short tempShort = depthPixelData[position];
                byteDepth[position] = (byte)(tempShort >> 8);
                int positionRemap = colorCoordinate[position].Y * 640 + colorCoordinate[position].X;
                // BUGFIX: the original test was 'positionRemap > 640 * 480', which
                // let index 640*480 (one past the end) through and did not guard
                // against negative coordinates from the Kinect mapping.
                if (positionRemap < 0 || positionRemap >= 640 * 480)
                    continue;
                depthRemapData[positionRemap] = depthPixelData[position];
                remap[positionRemap] = (byte)(tempShort >> 8);
            }
        }
        Bitmap bitmapDepth = new Bitmap(depthImage.Width, depthImage.Height, System.Drawing.Imaging.PixelFormat.Format8bppIndexed);
        BitmapData bmd2 = bitmapDepth.LockBits(new System.Drawing.Rectangle(0, 0, depthImage.Width, depthImage.Height), ImageLockMode.ReadWrite, bitmapDepth.PixelFormat);
        Marshal.Copy(byteDepth, 0, bmd2.Scan0, byteDepth.Length);
        bitmapDepth.UnlockBits(bmd2);
        Image<Gray, Byte> depthTemp = new Image<Gray, Byte>(bitmapDepth);
        //depth------------------end

        Byte[] backFrame = new Byte[640 * 480];
        BitmapImage trackingOut = new BitmapImage();
        if (trackingFlag != 0)
        {
            Image<Hsv, Byte> hsv = new Image<Hsv, Byte>(640, 480);
            CvInvoke.cvCvtColor(colorTemp, hsv, COLOR_CONVERSION.CV_BGR2HSV);
            Image<Gray, Byte> hue = hsv.Split()[0];
            //range of hist is 180 or 256? not quite sure
            DenseHistogram hist = new DenseHistogram(180, new RangeF(0.0f, 179.0f));
            // Copy the hue pixels inside the current track window into 'mask'.
            Image<Gray, Byte> mask = new Image<Gray, Byte>(trackWindow.Width, trackWindow.Height);
            for (int y = 0; y < 480; y++)
            {
                for (int x = 0; x < 640; x++)
                {
                    if (x >= trackWindow.X && x < trackWindow.X + trackWindow.Width && y >= trackWindow.Y && y < trackWindow.Y + trackWindow.Height)
                        mask[y - trackWindow.Y, x - trackWindow.X] = hue[y, x];
                }
            }
            hist.Calculate(new IImage[] { mask }, false, null);
            //maybe need to re-scale the hist to 0~255?

            //back projection
            IntPtr backProject = CvInvoke.cvCreateImage(hsv.Size, IPL_DEPTH.IPL_DEPTH_8U, 1);
            CvInvoke.cvCalcBackProject(new IntPtr[1] { hue }, backProject, hist);
            CvInvoke.cvErode(backProject, backProject, IntPtr.Zero, 3);

            //CAMshift
            CvInvoke.cvCamShift(backProject, trackWindow, new MCvTermCriteria(50, 0.1), out trackComp, out trackBox);
            trackWindow = trackComp.rect;
            // Keep the track window from collapsing below a usable size.
            if (trackWindow.Width < 5 || trackWindow.Height < 5)
            {
                if (trackWindow.Width < 5)
                {
                    trackWindow.X = trackWindow.X + trackWindow.Width / 2 - 3;
                    trackWindow.Width = 6;
                }
                if (trackWindow.Height < 5)
                {
                    trackWindow.Y = trackWindow.Y + trackWindow.Height / 2 - 3;
                    trackWindow.Height = 6;
                }
            }
            // Draw the track window and snapshot the frame into a WPF BitmapImage.
            Image<Bgr, Byte> showFrame = colorTemp;
            showFrame.Draw(trackWindow, new Bgr(System.Drawing.Color.Blue), 2);
            using (var stream = new MemoryStream())
            {
                showFrame.Bitmap.Save(stream, ImageFormat.Bmp);
                trackingOut.BeginInit();
                trackingOut.StreamSource = new MemoryStream(stream.ToArray());
                trackingOut.EndInit();
            }

            //calculate the average depth of tracking object
            // NOTE(review): the loop below indexes depthRemapData by window coordinates
            // without clamping; confirm trackWindow never extends past the 640x480 frame.
            int min = 65528, max = 0, num = 0;
            UInt32 sum = 0;
            for (int y = 0; y < trackWindow.Height; y++)
            {
                for (int x = 0; x < trackWindow.Width; x++)
                {
                    int position = (trackWindow.X + x) + (trackWindow.Y + y) * 640;
                    ushort temp = (ushort)depthRemapData[position];
                    if (temp != 65528 && temp != 0)//black
                    {
                        if (temp < min) min = temp;
                        if (temp > max) max = temp;
                        sum += temp;
                        num++;
                    }
                }
            }
            ushort average = 0;
            if (num != 0)
            {
                average = (ushort)(sum / num);
            }
            // Strip the player-index bits to get millimeters, then publish.
            Int32 depth = average >> DepthImageFrame.PlayerIndexBitmaskWidth;
            textBlock1.Text = String.Format("{0}mm", depth);
            Double distanceInMeter = (Double)depth / 1000;
            Messenger.Default.Send<Double>(distanceInMeter, "Distance");
        }

        // Show the annotated tracking frame when tracking, else the raw color stream.
        if (trackingFlag != 0)
            image1.Source = trackingOut;
        else
            image1.Source = BitmapSource.Create(640, 480, 96, 96, PixelFormats.Bgr32, null, colorPixelData, 640 * 4);
    }
}
/// <summary>
/// Computes the image descriptor: a 64-bin, factor-100-normalized HSV histogram
/// for each of five regions (four quadrants plus the center) of every channel,
/// concatenated into a single column vector stored in _descriptors.
/// </summary>
/// <param name="image">The BGR image to describe.</param>
public void Compute(Image <Bgr, Byte> image)
{
    using (Image <Hsv, Byte> hsvImage = image.Convert <Gray, byte>() == null ? null : image.Convert <Hsv, byte>())
    {
        Image <Gray, Byte>[] channels = hsvImage.Split();
        try
        {
            // Four quadrants plus a center crop, all half the image in each dimension.
            Rectangle[] regions = new Rectangle[]
            {
                new Rectangle(0, 0, image.Width / 2, image.Height / 2),                              // top left
                new Rectangle(image.Width / 2, 0, image.Width / 2, image.Height / 2),                // top right
                new Rectangle(0, image.Height / 2, image.Width / 2, image.Height / 2),               // bottom left
                new Rectangle(image.Width / 2, image.Height / 2, image.Width / 2, image.Height / 2), // bottom right
                new Rectangle(image.Width / 4, image.Height / 4, image.Width / 2, image.Height / 2)  // center
            };
            double factor = 100.0;
            List <float> totalHist = new List <float>();

            // One reusable histogram; BUGFIX: it is IDisposable and was never disposed.
            using (DenseHistogram hist = new DenseHistogram(64, new RangeF(0.0f, 255.0f)))
            {
                for (int i = 0; i < channels.Length; ++i)
                {
                    Image <Gray, byte>[] tempImages = new Image <Gray, byte>[] { channels[i] };
                    // The five per-region blocks were previously copy-pasted; a single
                    // loop over the region list does the identical work.
                    foreach (Rectangle region in regions)
                    {
                        channels[i].ROI = region;
                        hist.Calculate(tempImages, false, null);
                        hist.Normalize(factor);
                        float[] histRaw = new float[64];
                        hist.MatND.ManagedArray.CopyTo(histRaw, 0);
                        totalHist.AddRange(histRaw);
                    }
                }
            }
            _descriptors = new Matrix <float>(totalHist.ToArray());
            _descriptors = _descriptors.Transpose();
        }
        finally
        {
            // BUGFIX: the split channel images were leaked.
            foreach (Image <Gray, Byte> channel in channels)
            {
                channel.Dispose();
            }
        }
    }
}
public void SetHist(DenseHistogram hist) { Reset(); _hist = hist; isTracked = true; }
//histogram match
/// <summary>
/// Compares the grayscale histograms of two frames with the correlation metric
/// and reports whether they likely belong to the same scene.
/// </summary>
/// <param name="frame1">First frame.</param>
/// <param name="frame2">Second frame.</param>
/// <returns>true when the correlation is at least 0.8; false for likely different scenes.</returns>
static bool HistogramMatch(Bitmap frame1, Bitmap frame2)
{
    Image <Gray, byte> frame1_hist = new Image <Gray, byte>(f_width, f_height);
    Image <Gray, byte> frame2_hist = new Image <Gray, byte>(f_width, f_height);

    //convert frames to grayscale via temporary "Image" objects
    //(required by this older version of emgucv)
    // BUGFIX: the temporaries and histograms are now disposed on every path,
    // not just on the happy path.
    using (Image <Bgr, byte> frame1_Image = new Image <Bgr, byte>(frame1))
    using (Image <Bgr, byte> frame2_Image = new Image <Bgr, byte>(frame2))
    {
        CvInvoke.cvCvtColor(frame1_Image, frame1_hist, COLOR_CONVERSION.BGR2GRAY);
        CvInvoke.cvCvtColor(frame2_Image, frame2_hist, COLOR_CONVERSION.BGR2GRAY);
    }

    //Single-dimentional (Grayscale) histogram comparison
    using (frame1_hist)
    using (frame2_hist)
    using (DenseHistogram denseHist_frame1 = new DenseHistogram(256, new RangeF(0.0f, 256.0f)))
    using (DenseHistogram denseHist_frame2 = new DenseHistogram(256, new RangeF(0.0f, 256.0f)))
    {
        denseHist_frame1.Calculate(new Image <Gray, byte>[] { frame1_hist }, true, null);
        denseHist_frame2.Calculate(new Image <Gray, byte>[] { frame2_hist }, true, null);
        denseHist_frame1.Normalize(1);
        denseHist_frame2.Normalize(1);

        double histCompareRst = CvInvoke.cvCompareHist(denseHist_frame1, denseHist_frame2, HISTOGRAM_COMP_METHOD.CV_COMP_CORREL);

        // 0.8 cutoff is good for fast moving objects in video;
        // below it the frames are likely different scenes.
        return histCompareRst >= 0.8f;
    }
}
private int hack(DenseHistogram[] referenceHistogram, IEnumerable<DenseHistogram[]> testHistograms, HISTOGRAM_COMP_METHOD method, bool printHigh) { var comparisons = testHistograms.Select(x => GetHistogramComparison(x, referenceHistogram, method)); var multiplied = comparisons.Select(x => Math.Abs(x[0] * x[1] * x[2])); var i = 0; foreach (var x in comparisons) { Console.WriteLine("{0,1}: {1:E4} {2:E4} {3:E4} --- {4:E4}", i, x[0], x[1], x[2], multiplied.ToList()[i]); i++; } var minAndMax = findMinAndMax(multiplied); return printHigh ? minAndMax[1] : minAndMax[0]; }
/// <summary>
/// Per-camera worker thread: grabs frames from the capture, runs background
/// subtraction in the "BGSUB" state, and mean-shift tracking in the "TRACK"
/// state, updating the shared Main.tracking/lasttrack coordination flags.
/// </summary>
public void run()
{
    // Stagger the startup of camera threads 2 and 3.
    if (Thread.CurrentThread.Name.EndsWith("2"))
    {
        Thread.Sleep(10000);//MessageBox.Show("play " + Thread.CurrentThread.Name);//
        //
    }
    if (Thread.CurrentThread.Name.EndsWith("3"))
    {
        Thread.Sleep(23000); //MessageBox.Show("play " + Thread.CurrentThread.Name);//
        Thread.Sleep(5000);
    }
    try
    {
        //if (Thread.CurrentThread.Name.Equals("camera2"))
        //    Thread.Sleep(1000);
        // NOTE(review): this switch is a stub — every case is empty.
        switch (camno)
        {
            case 1: break;
            case 2: break;
            case 3: break;
            default: break;
        }
        //Thread Processing
        while ((currImage = cap.QueryFrame()) != null)
        {
            //### updating the currImage every time
            currImage = currImage.Resize(pictureBox1.Width, pictureBox1.Height, Emgu.CV.CvEnum.INTER.CV_INTER_LINEAR);
            //### Background Subtraction ~ currImage is subtracted with the bgImage and the result is stored in finalBlobImg.
            switch (this.state)
            {
                case "BGSUB":
                    // Console.WriteLine("_________________");
                    if (!Main.tracking) //&& Main.lasttrack!=camno
                    {
                        Console.WriteLine("###########" + Thread.CurrentThread.Name + "starts BGSUB");
                        blobDistanceList = bgSubtraction(currImage, bgImage, ref finalBlobImg, ref blobRect);
                        if (blobDistanceList != null)
                        {
                            foreach (Blob blob in blobDistanceList)
                            {
                                //Console.WriteLine("Blob::" + blob.distance + " " + blob.rect);
                                if (blob.distance < Main.Dis) //set the tracker to cam with same object //&& Main.lasttrack!=camno
                                {
                                    // Seed the mean-shift window from the blob rect,
                                    // padded/clamped against the frame borders
                                    // (319/239 suggest a 320x240 frame — confirm).
                                    track_window_mean = blob.rect;
                                    if (blob.rect.Width < 50)
                                    {
                                        track_window_mean.Width = 60;
                                    }
                                    else
                                    {
                                        track_window_mean.Width = blob.rect.Width;
                                    }
                                    if (blob.rect.Width + blob.rect.X > 319)
                                    {
                                        track_window_mean.X = track_window_mean.X - 10;
                                        track_window_mean.Width = 40;
                                    }
                                    if (blob.rect.Height + blob.rect.Y > 239)
                                    {
                                        track_window_mean.Height = track_window_mean.Height - 20;
                                    }
                                    if (blob.rect.Y == 0)
                                    {
                                        track_window_mean.Y = blob.rect.Y + 15;
                                    }
                                    //Console.WriteLine("Blob::::::::" + blob.distance + " " + blob.rect);
                                    // Claim the tracker for this camera and switch state.
                                    Main.lasttrack = camno;
                                    Main.tracking = true;
                                    state = "TRACK";
                                    Console.WriteLine("###########" + Thread.CurrentThread.Name + "starts TRACK");
                                    break;
                                }
                            }
                        }
                    }
                    break;
                case "TRACK":
                    blobDistanceList = bgSubtraction(currImage, bgImage, ref finalBlobImg, ref blobRect);
                    //--------------
                    foreach (Blob blob in blobDistanceList)
                    {
                        Console.WriteLine("Tracking Blob== " + blob.distance + " " + blob.rect);
                    }
                    Console.WriteLine("----");
                    //-------------
                    Image <Hsv, Byte> hsv = new Image <Hsv, Byte>(w, h);
                    hsv = currImage.Convert <Hsv, Byte>();
                    Console.WriteLine("1");
                    //extract the hue and value channels
                    Image <Gray, Byte>[] channels = hsv.Split(); //split into components
                    Image <Gray, Byte>[] imghue = new Image <Gray, byte> [1];
                    imghue[0] = channels[0]; //hsv, so channels[0] is hue.
                    Image <Gray, Byte> imgval = channels[2]; //hsv, so channels[2] is value.
                    Image <Gray, Byte> imgsat = channels[1]; //hsv, so channels[1] is saturation.
                    // Restrict the back projection to pixels within the saturation/value
                    // limits AND inside the dilated foreground mask.
                    mask = new Image <Gray, Byte>(w, h);
                    Hsv hsv_lower = new Hsv(0, smin, Math.Min(vmin, vmax));
                    Hsv hsv_upper = new Hsv(180, 256, Math.Max(vmin, vmax));
                    mask = hsv.InRange(hsv_lower, hsv_upper);
                    Image <Gray, Byte> backproject = Main.hist.BackProject(imghue);
                    mask = mask.And(finalBlobImg.Dilate(2));
                    backproject = mask.And(backproject);
                    MCvConnectedComp trac_comp = new MCvConnectedComp();
                    //Console.WriteLine("2");
                    MCvTermCriteria criteria_mean = new MCvTermCriteria(100, 0.002);
                    pictureBox2.Image = mask.Bitmap;
                    //Console.WriteLine(criteria_mean.GetType);
                    try
                    {
                        // Mean-shift the track window onto the back-projection peak.
                        Emgu.CV.CvInvoke.cvMeanShift(backproject, track_window_mean, criteria_mean, out trac_comp);
                    }
                    catch (CvException e)
                    {
                        Console.WriteLine(track_window_mean);
                        MessageBox.Show(e.ToString());
                    }
                    // Console.WriteLine("3");
                    // Draw the tracked rect and a cross at its center.
                    currImage.Draw(trac_comp.rect, new Bgr(255, 0, 0), 2);
                    currImage.Draw(new Cross2DF(new PointF((trac_comp.rect.X + trac_comp.rect.Width / 2), (trac_comp.rect.Y + trac_comp.rect.Height / 2)), 20, 20), new Bgr(255, 255, 255), 2);
                    track_window_mean = trac_comp.rect;
                    //check person left the view
                    Image <Gray, byte> subImgBg = bgImgGy.GetSubRect(trac_comp.rect);
                    Image <Gray, byte> subImgFg = currImgGy.GetSubRect(trac_comp.rect);
                    Image <Gray, byte> imMask = subImgFg.AbsDiff(subImgBg);
                    Gray cnt = imMask.GetAverage();
                    // Low average difference inside the track rect means the tracked
                    // person left this view: hand back to background subtraction.
                    if (cnt.Intensity < 10)
                    {
                        Main.lasttrack = camno;
                        Main.tracking = false;
                        state = "BGSUB";
                        Console.WriteLine("###########" + Thread.CurrentThread.Name + "switches to BGSUB");
                    }
                    //---------------------------
                    outimage = new Image <Gray, byte>(w, h);
                    for (int i = 0; i < trac_comp.rect.Height; i++)
                    {
                        for (int j = 0; j < trac_comp.rect.Width; j++)
                        {
                            //subImgFg.Data[i, j, 0] = 255;
                            // Binarize the FG/BG difference inside the track rect into outimage.
                            if (imMask.Data[i, j, 0] < Main.ThSub)
                            {
                                imMask.Data[i, j, 0] = 0;
                                outimage.Data[i + trac_comp.rect.Y, j + trac_comp.rect.X, 0] = 0;
                            }
                            else
                            {
                                imMask.Data[i, j, 0] = 255;
                                outimage.Data[i + trac_comp.rect.Y, j + trac_comp.rect.X, 0] = 255;
                            }
                        }
                        //Console.WriteLine();
                    }
                    outimage._Erode(2);
                    outimage._Dilate(3);
                    try
                    {
                        Image <Bgr, byte> subimg = currImage.And((outimage.Convert <Bgr, Byte>())).GetSubRect(trac_comp.rect);
                        //subimg.GetSubRect(subrect);//.Save(Thread.CurrentThread.Name + "\\" + k++ + ".jpg");
                        //Calc HISTOGRAM of each blob
                        DenseHistogram histBlob = new DenseHistogram(hdims, hranges); //cvCreateHist(1, &hdims, CV_HIST_ARRAY, &hranges, 1);
                        Image <Hsv, byte> hsvBlob = subimg.Convert <Hsv, byte>();
                        //extract the hue and value channels
                        Image <Gray, Byte>[] channelsBlob = hsvBlob.Split(); //split into components
                        Image <Gray, Byte>[] imghueBlob = new Image <Gray, byte> [1];
                        imghueBlob[0] = channelsBlob[0]; //hsv, so channels[0] is hue.
                        Image <Gray, Byte> maskBlob = hsvBlob.InRange(hsv_lower, hsv_upper);
                        histBlob.Calculate(imghueBlob, false, maskBlob);
                        // Distance of the tracked region's hue histogram to the target
                        // histogram; currently only computed, not acted upon.
                        double distance = CvInvoke.cvCompareHist(Main.hist.Ptr, histBlob.Ptr, Emgu.CV.CvEnum.HISTOGRAM_COMP_METHOD.CV_COMP_BHATTACHARYYA);
                        //if (distance < 0.15)
                        //{
                        //    // Main.hist = histBlob;
                        //    Console.WriteLine(Thread.CurrentThread.Name + " ===== " + distance);
                        //}
                    }
                    catch (CvException cve)
                    {
                        MessageBox.Show(cve.StackTrace);
                    }
                    //---------------------------
                    //pictureBox2.Image = mask.Bitmap;
                    //pictureBox3.Image = mask.And(finalBlobImg).Bitmap;
                    break;
            }
            pictureBox1.Image = currImage.Bitmap;
            Thread.Sleep(20);
        }
        Console.WriteLine("###########" + Thread.CurrentThread.Name + " exited");
    }
    catch (CvException e)
    {
        // NOTE(review): any CvException silently terminates this camera thread.
    }
}
public static Image <Bgr, Byte> Draw2DHisImg(Image <Bgr, Byte> srcImage, int h_bins, int s_bins) { DenseHistogram histDense = Cal2DHsvHist(srcImage, h_bins, s_bins); return(draw2DHistImg(histDense, 466, 72)); }
/// <summary>
/// Recognize gesture.
/// </summary>
/// <param name="contour">Hand contour</param>
/// <param name="fingersCount">Number of fingers</param>
/// <returns>Gesture (if any); null when no template scores below the acceptance thresholds.</returns>
public Gesture RecognizeGesture(Image <Gray, byte> contour, int fingersCount)
{
    List <Gesture> recognizedGestures = new List <Gesture>(Gestures);
    // Sentinel "worst possible" fit: any real match will score lower than 999*999.
    Gesture bestFit = new Gesture();
    bestFit.RecognizedData.ContourMatch = 999;
    bestFit.RecognizedData.HistogramMatch = 999;
    foreach (var g in recognizedGestures)
    {
        // Only compare against templates with a matching finger count.
        if (g.FingersCount != fingersCount)
        {
            continue;
        }
        using (MemStorage storage = new MemStorage())
        {
            Contour <Point> c1 = contour.FindContours(CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, RETR_TYPE.CV_RETR_LIST, storage);
            Contour <Point> c2 = g.Image.FindContours(CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, RETR_TYPE.CV_RETR_LIST, storage);
            if (c1 != null && c2 != null)
            {
                // NOTE(review): RangeF(100, 100) is a degenerate (zero-width) range
                // carried over from the original — confirm the intended PGH distance range.
                // BUGFIX: DenseHistogram is IDisposable and was previously leaked on
                // every loop iteration; both are now scoped in using blocks.
                using (DenseHistogram hist1 = new DenseHistogram(new int[2] { 8, 8 }, new RangeF[2] { new RangeF(-180, 180), new RangeF(100, 100) }))
                using (DenseHistogram hist2 = new DenseHistogram(new int[2] { 8, 8 }, new RangeF[2] { new RangeF(-180, 180), new RangeF(100, 100) }))
                {
                    // Pairwise geometrical histograms of both contours, normalized
                    // to the same total mass before comparison.
                    CvInvoke.cvCalcPGH(c1, hist1.Ptr);
                    CvInvoke.cvCalcPGH(c2, hist2.Ptr);
                    CvInvoke.cvNormalizeHist(hist1.Ptr, 100.0);
                    CvInvoke.cvNormalizeHist(hist2.Ptr, 100.0);
                    g.RecognizedData.Hand = Hand;
                    g.RecognizedData.HistogramMatch = CvInvoke.cvCompareHist(hist1, hist2, HISTOGRAM_COMP_METHOD.CV_COMP_BHATTACHARYYA);
                    g.RecognizedData.ContourMatch = CvInvoke.cvMatchShapes(c1, c2, CONTOURS_MATCH_TYPE.CV_CONTOURS_MATCH_I3, 0);
                    // Lower product of the two scores = better overall fit.
                    double rating = g.RecognizedData.ContourMatch * g.RecognizedData.HistogramMatch;
                    double bestSoFar = bestFit.RecognizedData.ContourMatch * bestFit.RecognizedData.HistogramMatch;
                    if (rating < bestSoFar)
                    {
                        bestFit = g;
                    }
                }
            }
        }
    }
    // Reliable, but strict: 0.01, 0.80, 0.20
    if (bestFit.RecognizedData.ContourMatch * bestFit.RecognizedData.HistogramMatch <= 0.0125 &&
        bestFit.RecognizedData.ContourMatch <= 0.80 &&
        bestFit.RecognizedData.HistogramMatch <= 0.20)
    {
        return bestFit;
    }
    else
    {
        return null;
    }
}
public void Detection() { try { check = comp1; for (int i = 0; i < dt.Rows.Count; i++) { capturedImg = new Image <Gray, byte>(pbDetectedFace.Image.Bitmap); DBImg = new Image <Gray, byte>((dt.Rows[i].ItemArray[0]).ToString()); hist1 = new DenseHistogram(256, new RangeF(0.0f, 255.0f)); hist2 = new DenseHistogram(256, new RangeF(0.0f, 255.0f)); hist1.Calculate(new Image <Gray, byte>[] { capturedImg }, false, null); hist2.Calculate(new Image <Gray, byte>[] { DBImg }, false, null); mat1 = new Mat(); hist1.CopyTo(mat1); mat2 = new Mat(); hist2.CopyTo(mat2); //float[] histFloat = new float[256]; //hist1.CopyTo(histFloat); //float[] hist2Float = new float[256]; //hist2.CopyTo(hist2Float); //double count1 = 0, count2 = 0; //for (int j = 0; j < 256; j++) //{ // count1 += histFloat[j]; // count2 += hist2Float[j]; //} //unsafe //{ // fixed (char* ch = (dt.Rows[i].ItemArray[0]).ToString().ToCharArray()) // { // fixed (char* ch2 = (dt.Rows[i].ItemArray[0]).ToString().ToCharArray()) // { // OpenCVWrapper.OpencvWrapperClass obj = new OpencvWrapperClass(); // sbyte* pic1 = (sbyte*)ch2; // sbyte* pic2 = (sbyte*)ch; // comp1=obj.CompareHistogram(pic1, pic2); // } // } //} comp1 = CvInvoke.CompareHist(mat1, mat2, Emgu.CV.CvEnum.HistogramCompMethod.Correl); if (comp1 > check && comp1 < 0.40 && comp1 > 0.221) { check = comp1; index = i; } } MessageBox.Show(check + " " + index.ToString()); } catch (IndexOutOfRangeException index) { MessageBox.Show(index.Message); } catch (Exception ee) { MessageBox.Show(ee.Message); } }
public DenseHistogram GetHistogram(Image<Gray, Byte> image) { //Create a histogram DenseHistogram histogram = new DenseHistogram( 256, //number of bins new RangeF(0, 255) //pixel value range ); //Compute histogram histogram.Calculate( new Image<Gray, Byte>[] { image }, //input image true, //If it is true, the histogram is not cleared in the beginning null //no mask is used ); float[] grayHist = new float[256]; //the resulting histogram array histogram.MatND.ManagedArray.CopyTo(grayHist, 0); //copy array //Loop over each bin for (int i = 0; i < 256; i++) { Console.WriteLine("value " + i + " = " + grayHist[i]); } return histogram; }
/// <summary>
/// Generate histograms for the image. One histogram is generated for each color channel.
/// You will need to call the Refresh function to do the painting afterward.
/// </summary>
/// <param name="image">The image to generate histogram from</param>
/// <param name="numberOfBins">The number of bins for each histogram</param>
/// <exception cref="ArgumentException">Thrown when the image is neither an Image nor a GpuImage.</exception>
public void GenerateHistograms(IImage image, int numberOfBins)
{
    IImage[] channels;
    Type imageType;
    if ((imageType = Toolbox.GetBaseType(image.GetType(), "Image`2")) != null)
    {
        channels = image.Split();
    }
    else if ((imageType = Toolbox.GetBaseType(image.GetType(), "GpuImage`2")) != null)
    {
        // Download the GPU image to the CPU before splitting.
        IImage img = imageType.GetMethod("ToImage").Invoke(image, null) as IImage;
        channels = img.Split();
    }
    else
    {
        // BUGFIX: the original passed the format string straight to the
        // ArgumentException(message, paramName) constructor, so "{0}" was never
        // substituted and the type name showed up as the parameter name.
        throw new ArgumentException(String.Format("The input image type of {0} is not supported", image.GetType()));
    }

    IColor typeOfColor = Activator.CreateInstance(imageType.GetGenericArguments()[0]) as IColor;
    String[] channelNames = Reflection.ReflectColorType.GetNamesOfChannels(typeOfColor);
    Color[] colors = Reflection.ReflectColorType.GetDisplayColorOfChannels(typeOfColor);

    float minVal, maxVal;
    #region Get the maximum and minimum color intensity values
    Type typeOfDepth = imageType.GetGenericArguments()[1];
    if (typeOfDepth == typeof(Byte))
    {
        // Bytes always span [0, 256); skip the min/max scan.
        minVal = 0.0f;
        maxVal = 256.0f;
    }
    else
    {
        #region obtain the maximum and minimum color value
        double[] minValues, maxValues;
        Point[] minLocations, maxLocations;
        image.MinMax(out minValues, out maxValues, out minLocations, out maxLocations);
        // Reduce the per-channel extrema to a single global range.
        double min = minValues[0], max = maxValues[0];
        for (int i = 1; i < minValues.Length; i++)
        {
            if (minValues[i] < min)
            {
                min = minValues[i];
            }
            if (maxValues[i] > max)
            {
                max = maxValues[i];
            }
        }
        #endregion
        minVal = (float)min;
        maxVal = (float)max;
    }
    #endregion

    // One histogram per channel, drawn in that channel's display color.
    for (int i = 0; i < channels.Length; i++)
    {
        using (DenseHistogram hist = new DenseHistogram(numberOfBins, new RangeF(minVal, maxVal)))
        {
            hist.Calculate(new IImage[1] { channels[i] }, true, null);
            AddHistogram(channelNames[i], colors[i], hist);
        }
    }
}
private Image<Gray, Byte> GetBackproject(Image<Gray, Byte> hue, DenseHistogram _hist,Image<Gray,Byte> mask,Rectangle hide) { Image<Gray, Byte> backproject = new Image<Gray, byte>(hue.Width, hue.Height); var imgs = new IntPtr[1] { hue }; Emgu.CV.CvInvoke.cvCalcBackProject(imgs, backproject, _hist); Emgu.CV.CvInvoke.cvAnd(backproject, mask, backproject, IntPtr.Zero); if (th_check) { backproject.ROI = face_rect; if (backproject.GetAverage().Intensity < backproj_threshold/2) { isTracked = false; } th_check = false; Emgu.CV.CvInvoke.cvResetImageROI(backproject); } hide.Height += 50; Emgu.CV.CvInvoke.cvSetImageROI(backproject, hide); try { Emgu.CV.CvInvoke.cvZero(backproject); } catch { } Emgu.CV.CvInvoke.cvResetImageROI(backproject); return backproject; }