/// <summary>
/// Update the motion history with the specified foreground mask and the time it was captured
/// </summary>
/// <param name="foregroundMask">The foreground mask of the image to be added to the history</param>
/// <param name="timestamp">The time when the image was captured</param>
public void Update(Image<Gray, Byte> foregroundMask, DateTime timestamp)
{
    _lastTime = timestamp;
    TimeSpan ts = _lastTime.Subtract(_initTime);
    _foregroundMask = foregroundMask;

    // Lazily allocate the motion history image (MHI), mask and orientation buffers
    if (_mhi == null) _mhi = new Image<Gray, float>(foregroundMask.Size);
    if (_mask == null) _mask = foregroundMask.CopyBlank();
    if (_orientation == null) _orientation = new Image<Gray, float>(foregroundMask.Size);

    // Update the MHI with the new silhouette, then derive the mask and gradient orientation
    CvInvoke.cvUpdateMotionHistory(foregroundMask.Ptr, _mhi.Ptr, ts.TotalSeconds, _mhiDuration);
    double scale = 255.0 / _mhiDuration;
    CvInvoke.cvConvertScale(_mhi.Ptr, _mask.Ptr, scale, (_mhiDuration - ts.TotalSeconds) * scale);
    CvInvoke.cvCalcMotionGradient(_mhi.Ptr, _mask.Ptr, _orientation.Ptr, _maxTimeDelta, _minTimeDelta, 3);
}
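// A minimal usage sketch (not part of the original source): it assumes a field "_motionHistory"
// holding an instance of the class that defines the overload above, and a foreground mask obtained
// by simple background differencing. The method name and the threshold value 30 are illustrative.
private void OnNewForegroundFrame(Image<Gray, Byte> currentFrame, Image<Gray, Byte> backgroundModel)
{
    Image<Gray, Byte> foregroundMask =
        currentFrame.AbsDiff(backgroundModel).ThresholdBinary(new Gray(30), new Gray(255));
    _motionHistory.Update(foregroundMask, DateTime.Now);
}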
/// <summary>
/// Update the motion history with the specified image and the time it was captured
/// </summary>
/// <param name="image">The image to be added to the history</param>
/// <param name="timestamp">The time when the image was captured</param>
public void Update(Image<Gray, Byte> image, DateTime timestamp)
{
    _lastTime = timestamp;
    TimeSpan ts = _lastTime.Subtract(_initTime);

    // Keep a bounded queue of recent frames; the oldest buffered frame is the difference reference
    if (_buffer.Count == _bufferMax)
    {
        _buffer.Dequeue();
    }
    _buffer.Enqueue(image);

    if (_silh == null) _silh = image.CopyBlank();
    if (_mhi == null) _mhi = new Image<Gray, float>(image.Size);
    if (_mask == null) _mask = image.CopyBlank();
    if (_orientation == null) _orientation = new Image<Gray, float>(image.Size);

    // Silhouette = thresholded absolute difference between the newest and oldest buffered frames
    CvInvoke.cvAbsDiff(image.Ptr, _buffer.Peek().Ptr, _silh.Ptr);
    CvInvoke.cvThreshold(_silh.Ptr, _silh.Ptr, _diffThresh, 1, Emgu.CV.CvEnum.THRESH.CV_THRESH_BINARY);

    // Update the MHI with the silhouette, then derive the mask and gradient orientation
    CvInvoke.cvUpdateMotionHistory(_silh.Ptr, _mhi.Ptr, ts.TotalSeconds, _mhiDuration);
    double scale = 255.0 / _mhiDuration;
    CvInvoke.cvConvertScale(_mhi.Ptr, _mask.Ptr, scale, (_mhiDuration - ts.TotalSeconds) * scale);
    CvInvoke.cvCalcMotionGradient(_mhi.Ptr, _mask.Ptr, _orientation.Ptr, _maxTimeDelta, _minTimeDelta, 3);
}
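// A minimal capture-loop sketch (an assumption, not from the original source): gray frames from an
// Emgu CV Capture device are fed into the raw-image overload above, which buffers them and computes
// the frame difference internally. The "_motionHistory" field and the frame count are illustrative.
private void RunMotionLoop()
{
    using (Capture capture = new Capture())
    {
        for (int i = 0; i < 100; i++)
        {
            Image<Gray, Byte> grayFrame = capture.QueryGrayFrame();
            _motionHistory.Update(grayFrame, DateTime.Now);
        }
    }
}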
/// <summary>
/// Keep only those orientations that have at least <paramref name="minSameOrientations"/> identically oriented pixels in their 3x3 neighborhood.
/// Also transforms the angle labels into binary form ([0..7] -> [1, 2, 4, 8, ..., 128]).
/// </summary>
/// <param name="quantizedOrientationImg">Quantized orientation image where angles are represented by labels [0..GlobalParameters.NUM_OF_QUNATIZED_ORIENTATIONS] (the invalid orientation label included).</param>
/// <param name="minSameOrientations">Minimal number of identical orientations within the 3x3 neighborhood. The valid range is [0..9].</param>
private static unsafe Image<Gray, Byte> RetainImportantQuantizedOrientations(Image<Gray, Byte> quantizedOrientationImg, int minSameOrientations)
{
    if (minSameOrientations < 0 || minSameOrientations > 9 /*3x3 neighborhood*/)
        throw new ArgumentOutOfRangeException("minSameOrientations", "Minimal number of same orientations must be in [0..9].");

    int qOrientStride = quantizedOrientationImg.Stride;
    int qOrientAlign = quantizedOrientationImg.Stride - quantizedOrientationImg.Width;

    // Start at the second pixel of the second row; the one-pixel image border is skipped
    byte* qOrientUnfilteredPtr = (byte*)quantizedOrientationImg.ImageData + qOrientStride + 1;

    Image<Gray, Byte> quantizedFilteredOrient = quantizedOrientationImg.CopyBlank();
    byte* qOrientFilteredPtr = (byte*)quantizedFilteredOrient.ImageData + qOrientStride + 1;
    //Debug.Assert(quantizedOrientationImg.Stride == quantizedFilteredOrient.Stride);

    int imgWidth = quantizedOrientationImg.Width;
    int imgHeight = quantizedOrientationImg.Height;

    for (int j = 1; j < imgHeight - 1; j++)
    {
        for (int i = 1; i < imgWidth - 1; i++)
        {
            if (*qOrientUnfilteredPtr != INVALID_QUANTIZED_ORIENTATION)
            {
                // Build a label histogram over the 3x3 neighborhood
                byte[] histogram = new byte[INVALID_QUANTIZED_ORIENTATION + 1];

                histogram[qOrientUnfilteredPtr[-qOrientStride - 1]]++;
                histogram[qOrientUnfilteredPtr[-qOrientStride + 0]]++;
                histogram[qOrientUnfilteredPtr[-qOrientStride + 1]]++;

                histogram[qOrientUnfilteredPtr[-1]]++;
                histogram[qOrientUnfilteredPtr[0]]++;
                histogram[qOrientUnfilteredPtr[+1]]++;

                histogram[qOrientUnfilteredPtr[+qOrientStride - 1]]++;
                histogram[qOrientUnfilteredPtr[+qOrientStride + 0]]++;
                histogram[qOrientUnfilteredPtr[+qOrientStride + 1]]++;

                // Find the dominant valid orientation in the neighborhood
                int maxBinVotes = 0;
                byte quantizedAngle = 0;
                for (byte histBinIdx = 0; histBinIdx < GlobalParameters.NUM_OF_QUNATIZED_ORIENTATIONS /*discard invalid orientation*/; histBinIdx++)
                {
                    if (histogram[histBinIdx] > maxBinVotes)
                    {
                        maxBinVotes = histogram[histBinIdx];
                        quantizedAngle = histBinIdx;
                    }
                }

                if (maxBinVotes >= minSameOrientations)
                    *qOrientFilteredPtr = (byte)(1 << quantizedAngle); //[1, 2, 4, 8, ..., 128] (8 orientations)
            }

            qOrientUnfilteredPtr++;
            qOrientFilteredPtr++;
        }

        // Skip the last pixel of the row, the stride alignment, and the first pixel of the next row
        qOrientUnfilteredPtr += 1 + qOrientAlign + 1;
        qOrientFilteredPtr += 1 + qOrientAlign + 1;
    }

    return quantizedFilteredOrient;
}
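// Hedged sketch (not part of the original source) of how the quantized orientation input above
// could be produced: gradient directions in degrees (assumed to lie in [0, 360)) are binned into
// the 8 labels [0..7], while pixels whose gradient magnitude falls below a threshold receive the
// INVALID_QUANTIZED_ORIENTATION label. The helper name and the magnitude parameter are assumptions.
private static byte QuantizeOrientation(float angleDeg, float magnitude, float minMagnitude)
{
    if (magnitude < minMagnitude)
        return INVALID_QUANTIZED_ORIENTATION;

    int bin = (int)Math.Round(angleDeg / 45.0) % 8; // 360 degrees / 8 bins => 45 degrees per bin
    return (byte)bin;
}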
private static unsafe Image<Gray, Byte> SpreadOrientations(Image<Gray, Byte> quantizedOrientationImage, int neighborhood)
{
    byte* srcImgPtr = (byte*)quantizedOrientationImage.ImageData;
    int imgStride = quantizedOrientationImage.Stride;

    Image<Gray, Byte> destImg = quantizedOrientationImage.CopyBlank();
    byte* destImgPtr = (byte*)destImg.ImageData;

    int imgHeight = destImg.Height;
    int imgWidth = destImg.Width;

    // OR the orientation bits of every (row, col) offset within the neighborhood into the
    // destination, so each destination pixel accumulates the orientations of its neighbors
    for (int row = 0; row < neighborhood; row++)
    {
        int subImageHeight = imgHeight - row;
        for (int col = 0; col < neighborhood; col++)
        {
            OrImageBits(&srcImgPtr[col], destImgPtr, imgStride, imgWidth - col, subImageHeight);
        }

        srcImgPtr += imgStride;
    }

    return destImg;
}
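// Hedged sketch of the OrImageBits helper referenced above (its actual implementation is not
// included in this excerpt): it ORs every byte of a width x height sub-image, addressed with the
// given stride, into the destination image, so each destination pixel accumulates the orientation
// bits that SpreadOrientations shifts onto it.
private static unsafe void OrImageBits(byte* srcPtr, byte* destPtr, int stride, int width, int height)
{
    for (int row = 0; row < height; row++)
    {
        for (int col = 0; col < width; col++)
        {
            destPtr[col] |= srcPtr[col];
        }

        srcPtr += stride;
        destPtr += stride;
    }
}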
private void Process(Image<Gray, byte> left, Image<Gray, byte> right)
{
    if (configuration.CalibrationStatus == CalibrationStatus.Calibrated)
    {
        // Rectify both images with the calibration maps and use the rectified pair below
        // (the original code computed the remapped images but never used them)
        var leftImage = left.CopyBlank();
        CvInvoke.Remap(left, leftImage, calibration.LeftMapX, calibration.LeftMapY, Inter.Linear);
        left = leftImage;

        var rightImage = right.CopyBlank();
        CvInvoke.Remap(right, rightImage, calibration.RightMapX, calibration.RightMapY, Inter.Linear);
        right = rightImage;
    }

    var disparityMap = disparity.Solve(left, right);
    UpdateResult(disparityMap);
}
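// Hedged sketch (an assumption, not the original implementation) of what the disparity.Solve call
// above might wrap: Emgu CV's StereoBM block matcher computing a disparity map from the (ideally
// rectified) stereo pair. The parameter values (64 disparities, 21-pixel block) are illustrative.
private Mat Solve(Image<Gray, byte> left, Image<Gray, byte> right)
{
    Mat disparityMap = new Mat();
    using (StereoBM matcher = new StereoBM(64, 21))
    {
        matcher.Compute(left, right, disparityMap);
    }
    return disparityMap;
}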