private static unsafe Gray<byte>[,] QuantizeOrientations(Gray<int>[,] orientDegImg)
{
    var quantizedUnfilteredOrient = new Gray<byte>[orientDegImg.Height(), orientDegImg.Width()];

    using (var uOrientDegImg = orientDegImg.Lock())
    using (var uQuantizedUnfilteredOrient = quantizedUnfilteredOrient.Lock())
    {
        int* orientDegImgPtr = (int*)uOrientDegImg.ImageData;

        byte* qOrientUnfilteredPtr = (byte*)uQuantizedUnfilteredOrient.ImageData;
        int qOrientUnfilteredStride = uQuantizedUnfilteredOrient.Stride;

        int imgWidth = uOrientDegImg.Width;
        int imgHeight = uOrientDegImg.Height;

        for (int j = 0; j < imgHeight; j++)
        {
            for (int i = 0; i < imgWidth; i++)
            {
                int angle = orientDegImgPtr[i];
                qOrientUnfilteredPtr[i] = AngleQuantizationTable[angle]; //[0-360] -> [...] -> [0-7] (for the mapping see "CalculateAngleQuantizationTable()")
            }

            orientDegImgPtr += imgWidth; //<Gray<int>> rows are always aligned
            qOrientUnfilteredPtr += qOrientUnfilteredStride;
        }
    }

    //quantizedUnfilteredOrient.Mul(36).Save("quantizedUnfilteredImg.bmp");
    return quantizedUnfilteredOrient;
}
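//Hedged sketch (not part of the original source): one plausible construction of the
//AngleQuantizationTable used above, assuming 8 orientation bins over [0-360] degrees and that
//opposite gradient directions share a bin (LINE2D-style). The real
//"CalculateAngleQuantizationTable()" may differ.
private static byte[] calculateAngleQuantizationTable_Sketch()
{
    const int NUM_OF_ORIENTATIONS = 8; //assumed; see GlobalParameters.NUM_OF_QUNATIZED_ORIENTATIONS

    var table = new byte[360 + 1];
    for (int angle = 0; angle <= 360; angle++)
    {
        int direction = angle % 180; //opposite directions map to the same bin
        table[angle] = (byte)(direction * NUM_OF_ORIENTATIONS / 180); //[0-179] -> [0-7]
    }

    return table;
}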
private static unsafe List<Feature> ExtractTemplate(Gray<byte>[,] orientationImage, int maxNumOfFeatures, Func<Feature, int> featureImportanceFunc)
{
    var candidates = new List<Feature>();

    using (var uOrientationImage = orientationImage.Lock())
    {
        byte* orientImgPtr = (byte*)uOrientationImage.ImageData;
        int orientImgStride = uOrientationImage.Stride;

        int imgWidth = uOrientationImage.Width;
        int imgHeight = uOrientationImage.Height;

        for (int row = 0; row < imgHeight; row++)
        {
            for (int col = 0; col < imgWidth; col++)
            {
                if (orientImgPtr[col] == 0) //valid quantized orientations are [1, 2, 4, 8, ..., 128]; zero means no orientation
                {
                    continue;
                }

                var candidate = new Feature(x: col, y: row, angleBinaryRepresentation: orientImgPtr[col]);
                candidates.Add(candidate);
            }

            orientImgPtr += orientImgStride;
        }
    }

    candidates = candidates.OrderByDescending(featureImportanceFunc).ToList(); //order descending
    return FilterScatteredFeatures(candidates, maxNumOfFeatures, 5); //candidates.Count must be >= MIN_NUM_OF_FEATURES
}
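//Hedged usage sketch (assumptions: a squared-magnitude image of the same size is available,
//e.g. from GradientComputation.Compute, and Gray<int>.Intensity exposes the raw value).
//Ranking by gradient magnitude keeps the strongest edges after FilterScatteredFeatures:
//
//  var features = ExtractTemplate(filteredOrientations, maxNumOfFeatures: 100,
//                                 f => magnitudeSqrImage[f.Y, f.X].Intensity);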
private static unsafe Gray<byte>[,] SpreadOrientations(Gray<byte>[,] quantizedOrientationImage, int neighborhood)
{
    var destImg = quantizedOrientationImage.CopyBlank();

    using (var uQuantizedOrientationImage = quantizedOrientationImage.Lock())
    using (var uDestImg = destImg.Lock())
    {
        byte* srcImgPtr = (byte*)uQuantizedOrientationImage.ImageData;
        int imgStride = uQuantizedOrientationImage.Stride;
        byte* destImgPtr = (byte*)uDestImg.ImageData;

        int imgHeight = uDestImg.Height;
        int imgWidth = uDestImg.Width;

        for (int row = 0; row < neighborhood; row++)
        {
            int subImageHeight = imgHeight - row;
            for (int col = 0; col < neighborhood; col++)
            {
                OrImageBits(&srcImgPtr[col], destImgPtr, imgStride, imgWidth - col, subImageHeight);
            }

            srcImgPtr += imgStride;
        }
    }

    return destImg;
}
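//Hedged sketch (assumption, not the original OrImageBits): the helper called above most likely
//bitwise-ORs a shifted source window into the destination, so that after all neighborhood
//offsets each destination byte holds the set of orientations present in its T x T neighborhood.
//Both images are assumed to share the same stride (they have equal sizes).
private static unsafe void orImageBits_Sketch(byte* srcPtr, byte* destPtr, int stride, int subImageWidth, int subImageHeight)
{
    for (int row = 0; row < subImageHeight; row++)
    {
        for (int col = 0; col < subImageWidth; col++)
        {
            destPtr[col] |= srcPtr[col]; //accumulate the orientation bit mask
        }

        srcPtr += stride;
        destPtr += stride;
    }
}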
private unsafe void calculateLinearMapForNeighbour(Gray<byte>[,] responseMap, int neighbourRow, int neighbourCol, Gray<byte>[,] linearMap)
{
    using (var uResponseMap = responseMap.Lock())
    using (var uLinearMap = linearMap.Lock())
    {
        int neighborhood = this.NeigborhoodSize;

        byte* linMapPtr = (byte*)uLinearMap.ImageData;
        int linMapStride = uLinearMap.Stride;

        int width = uResponseMap.Width;
        int height = uResponseMap.Height;
        int stride = uResponseMap.Stride;
        byte* responseMapPtr = (byte*)uResponseMap.GetData(neighbourRow);

        //The two loops copy every T-th pixel into the linear memory
        for (int r = neighbourRow; r < height; r += neighborhood)
        {
            int linMapIdx = 0;
            for (int c = neighbourCol; c < width; c += neighborhood)
            {
                linMapPtr[linMapIdx] = responseMapPtr[c];
                linMapIdx++;
            }

            responseMapPtr += stride * neighborhood; //skip neighborhood rows
            linMapPtr += linMapStride;
        }
    }
}
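//Worked example for the method above: with T = NeigborhoodSize = 4 and neighbour (row, col) = (1, 2),
//linear-map row 0 receives response-map pixels (1, 2), (1, 6), (1, 10), ...,
//linear-map row 1 receives pixels (5, 2), (5, 6), (5, 10), ..., and so on.
//This is the LINE2D memory linearization: all pixels sharing the same within-neighborhood
//offset become one contiguous row, so a template shifted by T lands on consecutive addresses.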
private static unsafe List<Point> searchSimilarityMap(Gray<short>[,] similarityMap, int minValue, out List<short> values)
{
    var positions = new List<Point>();
    values = new List<short>();

    using (var uSimilarityMap = similarityMap.Lock())
    {
        int width = uSimilarityMap.Width;
        int height = uSimilarityMap.Height;
        int stride = uSimilarityMap.Stride; //stride should be == width * sizeof(short) (see linearized maps)
        short* similarityMapPtr = (short*)uSimilarityMap.ImageData;

        for (int row = 0; row < height; row++)
        {
            for (int col = 0; col < width; col++)
            {
                if (similarityMapPtr[col] >= minValue)
                {
                    positions.Add(new Point(col, row));
                    values.Add(similarityMapPtr[col]);
                }
            }

            similarityMapPtr = (short*)((byte*)similarityMapPtr + stride);
        }
    }

    return positions;
}
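//Hedged usage sketch (the 0.85 fraction and the per-feature maximum of 4 are assumptions;
//they depend on how the similarity/response tables are scaled):
//
//  short minValue = (short)(template.Features.Length * 4 * 0.85f); //85% of the best possible score
//  List<short> scores;
//  var matchPositions = searchSimilarityMap(similarityMap, minValue, out scores);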
/// <summary>
/// Creates an integral image.
/// </summary>
/// <param name="img">Image.</param>
/// <returns>Integral image.</returns>
public static Gray<double>[,] MakeIntegral(this Gray<double>[,] img)
{
    var dstImg = new Gray<double>[img.Height() + 1, img.Width() + 1];

    using (var uImg = img.Lock())
    using (var uDstImg = dstImg.Lock())
    {
        makeIntegral_Double(uImg, uDstImg);
    }

    return dstImg;
}
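//The destination is one pixel larger in each dimension, so the sum of any axis-aligned region
//can be read with four lookups (the standard integral-image identity, assuming the usual
//convention that ii[r, c] is the sum of all pixels above and to the left of (r, c)):
//
//  sum over rows [r0, r1) x cols [c0, c1) = ii[r1, c1] - ii[r0, c1] - ii[r1, c0] + ii[r0, c0]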
private Image<Gray<byte>>[,] linearizeResponseMap(Gray<byte>[,] responseMap)
{
    var linearizedMaps = new Image<Gray<byte>>[NeigborhoodSize, NeigborhoodSize];

    //The outer two loops iterate over the top-left T^2 starting pixels
    for (int rowNeighbor = 0; rowNeighbor < this.NeigborhoodSize; rowNeighbor++)
    {
        for (int colNeighbor = 0; colNeighbor < this.NeigborhoodSize; colNeighbor++)
        {
            var linearMap = new Gray<byte>[this.LinearMapSize.Height, this.LinearMapSize.Width];
            calculateLinearMapForNeighbour(responseMap, rowNeighbor, colNeighbor, linearMap);

            linearizedMaps[rowNeighbor, colNeighbor] = linearMap.Lock();
        }
    }

    return linearizedMaps;
}
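//Note on the layout built above: for a neighborhood size T there are T * T linear maps, one per
//within-neighborhood starting offset (see calculateLinearMapForNeighbour). Each map is stored in
//its locked form (Image<Gray<byte>>) so the matching stage can read its rows through raw pointers.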
private static Gray<short>[,] calculateSimilarityMap(ITemplate template, LinearizedMaps maps, Rectangle searchArea)
{
    Debug.Assert(searchArea.Right <= maps.ImageSize.Width &&
                 searchArea.Bottom <= maps.ImageSize.Height);
    Debug.Assert(template.Size.Width + searchArea.X < maps.ImageSize.Width &&
                 template.Size.Height + searchArea.Y < maps.ImageSize.Height);

    int width = searchArea.Width / maps.NeigborhoodSize;
    int height = searchArea.Height / maps.NeigborhoodSize;

    var similarityMap = new Gray<short>[height, width]; //performance penalty (alloc, dealloc)!!!
    var buffer = new Gray<byte>[height, width];

    using (var uSimilarityMap = similarityMap.Lock())
    using (var uBuffer = buffer.Lock())
    {
        int nAddsInBuffer = 0;
        foreach (var feature in template.Features)
        {
            var position = new Point(feature.X + searchArea.X, feature.Y + searchArea.Y); //shifted position

            Point mapPoint;
            var neighbourMap = maps.GetMapElement(position, feature.AngleIndex, out mapPoint);

            neighbourMap.AddTo(uBuffer, mapPoint);
            nAddsInBuffer++;

            if (nAddsInBuffer >= GlobalParameters.MAX_SUPPORTED_NUM_OF_FEATURES_ADDDED_AS_BYTE) //flush before the byte buffer can overflow
            {
                uBuffer.AddTo(uSimilarityMap);
                buffer.Clear(); //clear the buffer
                nAddsInBuffer = 0;
            }
        }

        bool finalAdd = (template.Features.Length % GlobalParameters.MAX_SUPPORTED_NUM_OF_FEATURES_ADDDED_AS_BYTE != 0);
        if (finalAdd)
        {
            uBuffer.AddTo(uSimilarityMap);
        }
    }

    return similarityMap;
}
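//Why the intermediate byte buffer above: each feature contributes a small per-pixel response,
//so up to GlobalParameters.MAX_SUPPORTED_NUM_OF_FEATURES_ADDDED_AS_BYTE responses can be
//accumulated in single bytes before an overflow becomes possible; the buffer is then flushed
//into the 16-bit similarity map. (For example, if the maximum per-feature response is 4, up to
//255 / 4 = 63 additions are safe; the exact constant depends on the response-table scaling.)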
/// <summary>
/// Computes gradient orientations from a grayscale image.
/// </summary>
/// <param name="frame">Image.</param>
/// <param name="magnitudeSqrImage">Squared magnitude image.</param>
/// <param name="minValidMagnitude">Minimal valid magnitude.</param>
/// <returns>Orientation image (angles are in degrees).</returns>
public unsafe static Gray<int>[,] Compute(Gray<byte>[,] frame, out Gray<int>[,] magnitudeSqrImage, int minValidMagnitude)
{
    var minSqrMagnitude = minValidMagnitude * minValidMagnitude;

    var orientationImage = new Gray<int>[frame.Height(), frame.Width()];
    var _magnitudeSqrImage = orientationImage.CopyBlank();

    using (var uFrame = frame.Lock())
    {
        ParallelLauncher.Launch(thread =>
        {
            computeGray(thread, (byte*)uFrame.ImageData, uFrame.Stride, orientationImage, _magnitudeSqrImage, minSqrMagnitude);
        },
        frame.Width() - 2 * kernelRadius, frame.Height() - 2 * kernelRadius);
    }

    magnitudeSqrImage = _magnitudeSqrImage;
    return orientationImage;
}
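//Hedged usage sketch (the magnitude threshold of 35 is illustrative only):
//
//  Gray<int>[,] magnitudeSqrImg;
//  var orientDegImg = Compute(grayFrame, out magnitudeSqrImg, minValidMagnitude: 35);
//  var quantizedOrient = QuantizeOrientations(orientDegImg); //see QuantizeOrientations above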
/// <summary>
/// Computes moments for the provided image.
/// </summary>
/// <param name="image">Image.</param>
/// <param name="area">Area.</param>
public void Compute(Gray<float>[,] image, Rectangle area)
{
    Reset();

    float m00, m01, m10, m11, m02, m20, m12, m21, m30, m03;
    using (var uImg = image.Lock(area))
    {
        computeFloat(uImg, Point.Empty, Order,
                     out m00, out m01, out m10, out m11, out m02,
                     out m20, out m12, out m21, out m30, out m03);
    }

    this.M00 += m00; this.M01 += m01; this.M10 += m10; this.M11 += m11;
    this.M02 += m02; this.M20 += m20; this.M12 += m12; this.M21 += m21;
    this.M30 += m30; this.M03 += m03;

    InvM00 = 1f / M00;
    CenterX = M10 * InvM00;
    CenterY = M01 * InvM00;
}
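//Hedged usage sketch (assuming this type is instantiated as "moments"; the area is illustrative):
//
//  moments.Compute(probabilityMap, new Rectangle(0, 0, 100, 100));
//  float cx = moments.CenterX, cy = moments.CenterY; //centroid, relative to the area origin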
private unsafe Gray<byte>[,] computeResponseMap(Gray<byte>[,] spreadQuantizedImage, int orientationIndex)
{
    int width = this.ImageValidSize.Width;
    int height = this.ImageValidSize.Height;
    var responseMap = new Gray<byte>[height, width];

    using (var uSpreadQuantizedImage = spreadQuantizedImage.Lock())
    {
        int srcStride = uSpreadQuantizedImage.Stride;
        byte* srcPtr = (byte*)uSpreadQuantizedImage.ImageData;

        using (var uResponseMap = responseMap.Lock())
        {
            int dstStride = uResponseMap.Stride;
            byte* dstPtr = (byte*)uResponseMap.ImageData; //for the image

            fixed (byte* angleTablePtr = &SimilarityAngleTable[0, 0])
            {
                byte* orientPtr = angleTablePtr + orientationIndex * SimilarityAngleTable.GetLength(1); //table row for the requested orientation

                for (int row = 0; row < height; row++)
                {
                    for (int col = 0; col < width; col++)
                    {
                        //dstPtr[col] = SimilarityAngleTable[orientationIndex, srcPtr[col]];
                        dstPtr[col] = orientPtr[srcPtr[col]]; //faster: direct table-row lookup
                    }

                    srcPtr += srcStride;
                    dstPtr += dstStride;
                }
            }
        }
    }

    return responseMap;
}
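//Hedged sketch (assumption, not the original code): one plausible way the SimilarityAngleTable
//used above could be precomputed. For each template orientation and each spread-orientation byte
//mask, the response is the best closeness score over all orientations present in the mask. The
//scaling (identical bins score 4) is an assumption borrowed from common LINE2D implementations.
private static byte[,] computeSimilarityAngleTable_Sketch()
{
    var table = new byte[8, 256];
    for (int orient = 0; orient < 8; orient++)
    {
        for (int mask = 0; mask < 256; mask++)
        {
            byte best = 0;
            for (int i = 0; i < 8; i++)
            {
                if ((mask & (1 << i)) == 0) continue; //orientation i is not present in the mask

                int diff = Math.Abs(orient - i);
                diff = Math.Min(diff, 8 - diff);      //circular distance between orientation bins
                byte score = (byte)(4 - diff);        //assumed scaling: [0..4]

                if (score > best) best = score;
            }

            table[orient, mask] = best;
        }
    }

    return table;
}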
/// <summary>
/// Takes only those orientations that have the minimal number of equally oriented pixels in their 3x3 neighborhood.
/// Also performs the angle transformation into binary form ([0..7] -> [1, 2, 4, 8, ..., 128]).
/// </summary>
/// <param name="quantizedOrientationImg">Quantized orientation image where angles are represented by labels [0..GlobalParameters.NUM_OF_QUNATIZED_ORIENTATIONS] (the invalid orientation label included).</param>
/// <param name="minSameOrientations">Minimal number of same orientations in a 3x3 neighborhood. The valid range is [0..9].</param>
private static unsafe Gray<byte>[,] RetainImportantQuantizedOrientations(Gray<byte>[,] quantizedOrientationImg, int minSameOrientations)
{
    if (minSameOrientations < 0 || minSameOrientations > 9 /*3x3 neighborhood*/)
        throw new ArgumentOutOfRangeException("minSameOrientations", "The minimal number of same orientations must be in the range [0..9].");

    var quantizedFilteredOrient = quantizedOrientationImg.CopyBlank();

    using (var uQuantizedOrientationImg = quantizedOrientationImg.Lock())
    using (var uQuantizedFilteredOrient = quantizedFilteredOrient.Lock())
    {
        int qOrientStride = uQuantizedOrientationImg.Stride;
        int qOrientAlign = uQuantizedOrientationImg.Stride - uQuantizedOrientationImg.Width;

        //skip the first row and the first column so the 3x3 neighborhood always fits into the image
        byte* qOrientUnfilteredPtr = (byte*)uQuantizedOrientationImg.ImageData + qOrientStride + 1;
        byte* qOrientFilteredPtr = (byte*)uQuantizedFilteredOrient.ImageData + qOrientStride + 1;
        //Debug.Assert(uQuantizedOrientationImg.Stride == uQuantizedFilteredOrient.Stride);

        int imgWidth = uQuantizedOrientationImg.Width;
        int imgHeight = uQuantizedOrientationImg.Height;

        for (int j = 1; j < imgHeight - 1; j++)
        {
            for (int i = 1; i < imgWidth - 1; i++)
            {
                if (*qOrientUnfilteredPtr != INVALID_QUANTIZED_ORIENTATION)
                {
                    //build the orientation histogram of the 3x3 neighborhood
                    byte[] histogram = new byte[INVALID_QUANTIZED_ORIENTATION + 1];

                    histogram[qOrientUnfilteredPtr[-qOrientStride - 1]]++;
                    histogram[qOrientUnfilteredPtr[-qOrientStride + 0]]++;
                    histogram[qOrientUnfilteredPtr[-qOrientStride + 1]]++;

                    histogram[qOrientUnfilteredPtr[-1]]++;
                    histogram[qOrientUnfilteredPtr[0]]++;
                    histogram[qOrientUnfilteredPtr[+1]]++;

                    histogram[qOrientUnfilteredPtr[+qOrientStride - 1]]++;
                    histogram[qOrientUnfilteredPtr[+qOrientStride + 0]]++;
                    histogram[qOrientUnfilteredPtr[+qOrientStride + 1]]++;

                    //find the dominant orientation in the 3x3 box
                    int maxBinVotes = 0;
                    byte quantizedAngle = 0;
                    for (byte histBinIdx = 0; histBinIdx < GlobalParameters.NUM_OF_QUNATIZED_ORIENTATIONS /*discard the invalid orientation*/; histBinIdx++)
                    {
                        if (histogram[histBinIdx] > maxBinVotes)
                        {
                            maxBinVotes = histogram[histBinIdx];
                            quantizedAngle = histBinIdx;
                        }
                    }

                    if (maxBinVotes >= minSameOrientations)
                    {
                        *qOrientFilteredPtr = (byte)(1 << quantizedAngle); //[1, 2, 4, 8, ..., 128] (8 orientations)
                    }
                }

                qOrientUnfilteredPtr++;
                qOrientFilteredPtr++;
            }

            qOrientUnfilteredPtr += 1 + qOrientAlign + 1; //skip the last pixel, the alignment bytes, and the first pixel of the next row
            qOrientFilteredPtr += 1 + qOrientAlign + 1;
        }
    }

    //quantizedFilteredOrient.Save("quantizedImg.bmp");
    return quantizedFilteredOrient;
}
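//Hedged usage sketch: how the template-extraction steps in this file could chain together
//(the threshold of 5 is illustrative only):
//
//  var quantized = QuantizeOrientations(orientDegImg);                         //[0-360] -> [0-7]
//  var filtered = RetainImportantQuantizedOrientations(quantized, 5);          //[0-7] -> [1, 2, 4, ..., 128], denoised
//  var features = ExtractTemplate(filtered, maxNumOfFeatures, importanceFunc); //strongest scattered features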