/// <summary>
/// Scores the given scanning windows against the object model using the
/// nearest-neighbour classifier and returns the indices of windows that pass
/// both the positive-nearest-neighbour threshold and the relative-similarity
/// threshold.
/// Side effects: repopulates _acceptedWindows, _patches and
/// CurrentState.RelativeSimilarities for the current frame, and resets the
/// frame ROI before returning.
/// </summary>
/// <param name="frame">Current grayscale frame.</param>
/// <param name="scanningWindows">Indices into the generator's scanning-window array to evaluate.</param>
/// <returns>The shared _acceptedWindows list holding the indices of accepted windows.</returns>
public List <int> AcceptedWindows(Image <Gray, byte> frame, List <int> scanningWindows)
{
    // Start each frame from a clean slate.
    _acceptedWindows.Clear();
    _relativeSimilarities.Clear();
    _patches.Clear();
    CurrentState.RelativeSimilarities.Clear();

    IBoundingBox[] candidateWindows = _scanningWindowGenerator.ScanningWindows;

    foreach (int windowIndex in scanningWindows)
    {
        IBoundingBox window = candidateWindows[windowIndex];

        // Normalized patch for this window (smoothed with the model's Gaussian sigma).
        Image <Gray, byte> windowPatch = Service.GeneratePatch(
            frame, window, null, CurrentState.ObjectModelGaussianSigma, _objectModel.PatchSize);

        // Relative similarity of the patch to the object model, plus the raw
        // positive/negative nearest-neighbour similarities.
        float positiveNnSimilarity;
        float negativeNnSimilarity;
        float similarity = _objectModel.RelativeSimilarity(
            windowPatch, out positiveNnSimilarity, out negativeNnSimilarity);

        bool accepted = positiveNnSimilarity > _pnnSimilarityThreshold
                        && similarity > _relativeSimilarityThreshold;
        if (accepted)
        {
            _acceptedWindows.Add(windowIndex);
        }

        // Remember the score and the patch for the later fusion/learning steps.
        // NOTE(review): Dictionary.Add throws on duplicate keys — assumes the
        // scanning-window indices map to distinct bounding boxes; confirm.
        CurrentState.RelativeSimilarities.Add(window, similarity);
        _patches.Add(window, windowPatch);
    }

    // Defensive: make sure no ROI set by patch generation survives this call.
    frame.ROI = Rectangle.Empty;

    return(_acceptedWindows);
}
/// <summary>
/// Performs one online-learning step: validates the current bounding box, then
/// trains the ensemble classifier and the nearest-neighbour classifier (object
/// model) with synthesized positive patches and picked negative patches.
/// </summary>
/// <param name="currentFrame">Current grayscale frame; its ROI is temporarily set and reset before returning.</param>
/// <param name="currentBb">Bounding box of the tracked object in the current frame.</param>
/// <param name="valid">
/// Set to false when the box lies outside the frame, the appearance changed too
/// fast, or the patch resembles a known negative; true after successful training.
/// </param>
public void TrainDetector(Image <Gray, byte> currentFrame, IBoundingBox currentBb, out bool valid)
{
    // Nothing reliable to learn from a patch that is (partly) outside the image.
    if (!currentBb.InsideFrame(currentFrame.Size))
    {
        valid = false;
        return;
    }

    // Cut out the current patch and normalize it to the object-model patch size.
    currentFrame.ROI = Rectangle.Round(currentBb.GetRectangle());
    Image <Gray, byte> currentPatch = currentFrame.Resize(
        _objectModel.PatchSize.Width,
        _objectModel.PatchSize.Height,
        INTER.CV_INTER_LINEAR);
    currentFrame.ROI = Rectangle.Empty;

    // Reject when the appearance changed too fast (low relative similarity)...
    float pnnSimilarity, nnnSimilarity;
    if (_objectModel.RelativeSimilarity(currentPatch, out pnnSimilarity, out nnnSimilarity) < 0.5f)
    {
        valid = false;
        return;
    }

    // ...or when the patch is too close to a known negative example.
    if (nnnSimilarity > _sameSimilarityThreshold)
    {
        valid = false;
        return;
    }

    #region ensemble classifier

    // Generate warped positive patches around the current bounding box.
    List <Image <Gray, byte> > positivePatchesForEnsemble = GeneratePatches(
        _ensembleClassifier.SmoothFrame,
        currentBb,
        _runtimePosPatchSynthesisInfo.EnsembleCount,
        _runtimePosPatchSynthesisInfo.WarpInfo,
        0,
        Size.Empty
        );

    // Pick negatives: windows the ensemble considered positive but that do not
    // sufficiently overlap the current object location.
    List <IBoundingBox> allEnsembleNegativeBbs = new List <IBoundingBox>();
    foreach (KeyValuePair <IBoundingBox, double> pair in _ensembleClassifier.PositivePosteriors)
    {
        IBoundingBox bb = pair.Key;
        if (bb.GetOverlap(currentBb) < _runtimeNegativePatchPickingInfo.Overlap)
        {
            allEnsembleNegativeBbs.Add(bb);
        }
    }

    List <Image <Gray, byte> > negativePatchesForEnsemble = PickBoundingBoxesAndGeneratePatches(
        _ensembleClassifier.SmoothFrame,
        allEnsembleNegativeBbs,
        _runtimeNegativePatchPickingInfo.EnsembleCount,
        Size.Empty
        );

    // Train the ensemble classifier; bootstrap controls how many passes are made
    // over the same training set (currently a single pass).
    int bootstrap = 1;
    for (int i = 0; i < bootstrap; i++)
    {
        _ensembleClassifier.TrainWithUnseenPatches(positivePatchesForEnsemble, negativePatchesForEnsemble);
    }

    #endregion

    #region nn classifier

    // Generate warped positive patches and normalize them to the model patch size.
    List <Image <Gray, byte> > positivePatchesForNn = GeneratePatches(
        currentFrame,
        currentBb,
        _runtimePosPatchSynthesisInfo.NnCount,
        _runtimePosPatchSynthesisInfo.WarpInfo,
        _runtimePosPatchSynthesisInfo.GaussianSigma,
        _objectModel.PatchSize
        );

    int width = _objectModel.PatchSize.Width;
    int height = _objectModel.PatchSize.Height;

    for (int i = 0; i < positivePatchesForNn.Count; i++)
    {
        positivePatchesForNn[i] = positivePatchesForNn[i].Resize(width, height, INTER.CV_INTER_LINEAR);
    }

    // Negatives for the NN classifier are reused from the ensemble's negative
    // set instead of being re-picked from the scanning windows.
    List <Image <Gray, byte> > negativePatchesForNn = PickFromList <Image <Gray, byte> >(
        negativePatchesForEnsemble, _runtimeNegativePatchPickingInfo.NnCount);

    for (int i = 0; i < negativePatchesForNn.Count; i++)
    {
        // Reuse the hoisted patch dimensions (also fixes the previous mixed use
        // of ObjectModel.PatchSize.Height vs _objectModel.PatchSize.Width).
        negativePatchesForNn[i] = negativePatchesForNn[i].Resize(width, height, INTER.CV_INTER_LINEAR);
    }

    // Train the NN classifier (updates the object model).
    TrainNnClassifier(positivePatchesForNn, negativePatchesForNn);

    #endregion

    valid = true;

    // Defensive: ensure the frame leaves this method without an ROI set.
    currentFrame.ROI = Rectangle.Empty;
}
/// <summary>
/// Fuses the tracker and detector hypotheses into a single output bounding box.
/// The tracker result is kept unless exactly one detection is both more
/// confident than the tracker (when a tracker box exists) and passes the
/// minimum-similarity gate, in which case the tracker is re-initialized on
/// that detection.
/// </summary>
/// <param name="trackerBoundingBox">Tracker hypothesis, or null when the tracker failed.</param>
/// <param name="detectorBoundingBoxes">Detector hypotheses, or null when there are none.</param>
/// <param name="frame">Current grayscale frame; its ROI is temporarily set and reset before returning.</param>
/// <param name="reinitializeTracker">True when a single confident detection replaces the tracker output.</param>
/// <param name="prevValid">Validity of the previous frame's output (unused by this variant).</param>
/// <param name="currValid">Always false in this variant — NOTE(review): confirm whether this is intended.</param>
/// <returns>The fused bounding box, or null when the object is not visible.</returns>
public IBoundingBox DetermineBoundingBox(IBoundingBox trackerBoundingBox, List <IBoundingBox> detectorBoundingBoxes, Image <Gray, byte> frame, out bool reinitializeTracker, bool prevValid, out bool currValid)
{
    reinitializeTracker = false;
    currValid = false;

    /* If neither the tracker nor the detector output a bounding box,
     * the object is declared as not visible */
    if (trackerBoundingBox == null && detectorBoundingBoxes == null)
    {
        return(null);
    }

    // Minimum relative similarity a detection must reach to be trusted.
    const float minDetectionSimilarity = 0.55f;

    // Collect every candidate box: tracker first, then detections.
    List <IBoundingBox> boundingBoxes = new List <IBoundingBox>();
    if (trackerBoundingBox != null)
    {
        boundingBoxes.Add(trackerBoundingBox);
    }
    if (detectorBoundingBoxes != null)
    {
        boundingBoxes.AddRange(detectorBoundingBoxes);
    }

    // Score every candidate against the object model.
    // NOTE(review): the score is RelativeSimilarity although the original local
    // was named "conservativeSimilarity" — confirm which measure is intended.
    Dictionary <IBoundingBox, float> relativeSimilarities = new Dictionary <IBoundingBox, float>();
    foreach (IBoundingBox boundingBox in boundingBoxes)
    {
        frame.ROI = Rectangle.Round(boundingBox.GetRectangle());

        // Dispose the temporary patch right away; only its score is retained.
        using (Image <Gray, byte> patch = frame.Resize(_objectModel.PatchSize.Width, _objectModel.PatchSize.Height, INTER.CV_INTER_LINEAR))
        {
            relativeSimilarities.Add(boundingBox, _objectModel.RelativeSimilarity(patch));
        }
    }
    frame.ROI = Rectangle.Empty;

    // Default output: trust the tracker when it produced a box (null otherwise).
    IBoundingBox output = trackerBoundingBox;

    if (detectorBoundingBoxes != null)
    {
        // Detections that beat the tracker (when present) and pass the gate.
        List <IBoundingBox> confidentDetections = new List <IBoundingBox>();
        foreach (IBoundingBox bb in detectorBoundingBoxes)
        {
            bool beatsTracker = trackerBoundingBox == null
                                || relativeSimilarities[bb] > relativeSimilarities[trackerBoundingBox];
            if (beatsTracker && relativeSimilarities[bb] > minDetectionSimilarity)
            {
                confidentDetections.Add(bb);
            }
        }

        // Re-initialize the tracker only on an unambiguous (single) detection.
        if (confidentDetections.Count == 1)
        {
            output = confidentDetections[0];
            reinitializeTracker = true;
        }
    }

    return(output);
}
/// <summary>
/// Fuses the tracker and detector hypotheses into a single output bounding box,
/// and decides whether the result is valid enough for learning.
/// Behaviour visible in this method: a single far, more-confident detection
/// re-initializes the tracker; detections that heavily overlap the tracker are
/// averaged into the tracker box; with no tracker, a single non-maximum-
/// suppressed detection re-initializes the tracker. Validity is inherited from
/// prevValid, or re-established via a conservative-similarity check.
/// </summary>
/// <param name="trackerBoundingBox">Tracker hypothesis, or null when the tracker failed.</param>
/// <param name="detectorBoundingBoxes">Detector hypotheses, or null when there are none.</param>
/// <param name="frame">Current grayscale frame; its ROI is set and reset within this method.</param>
/// <param name="reinitializeTracker">True when the tracker should be restarted on the returned box.</param>
/// <param name="prevValid">Whether the previous frame's output was valid.</param>
/// <param name="currValid">Whether the returned box should be treated as valid for learning.</param>
/// <returns>The fused bounding box, or null when the object is declared not visible.</returns>
public IBoundingBox DetermineBoundingBox(IBoundingBox trackerBoundingBox, List <IBoundingBox> detectorBoundingBoxes, Image <Gray, byte> frame, out bool reinitializeTracker, bool prevValid, out bool currValid)
{
    IBoundingBox output = null;
    reinitializeTracker = false;
    currValid = false;

    /* [Zdenek] If neither the tracker nor the detector output a bounding box,
     * the object is declared as not visible */
    if (trackerBoundingBox == null && detectorBoundingBoxes == null)
    {
        return(null);
    }

    // calculate relative similarities for all bounding boxes
    Dictionary <IBoundingBox, float> relativeSimilarities = new Dictionary <IBoundingBox, float>();

    // -> tracker: the tracker patch must be scored here, since the detector
    //    only scored its own scanning windows.
    if (trackerBoundingBox != null)
    {
        frame.ROI = Rectangle.Round(trackerBoundingBox.GetRectangle());
        Size patchSize = _objectModel.PatchSize;
        Image <Gray, byte> trackerPatch = frame.Resize(patchSize.Width, patchSize.Height, INTER.CV_INTER_LINEAR);
        relativeSimilarities.Add(trackerBoundingBox, _objectModel.RelativeSimilarity(trackerPatch));
        frame.ROI = Rectangle.Empty;
    }

    // -> detector: reuse the scores already computed during detection
    //    (stored in CurrentState.RelativeSimilarities by the detector pass).
    if (detectorBoundingBoxes != null)
    {
        for (int i = 0; i < detectorBoundingBoxes.Count; i++)
        {
            IBoundingBox bb = detectorBoundingBoxes[i];
            relativeSimilarities.Add(bb, CurrentState.RelativeSimilarities[bb]);
        }
    }

    // if tracker is defined
    if (trackerBoundingBox != null)
    {
        output = trackerBoundingBox;

        if (detectorBoundingBoxes != null)
        {
            // Overlap above this value means a detection is "close" to the tracker.
            float bigOverlap = 0.8f;

            // get detector bounding boxes
            // that are far from tracker
            // and are more confident then the tracker
            List <IBoundingBox> bbs = new List <IBoundingBox>();
            foreach (IBoundingBox bb in detectorBoundingBoxes)
            {
                if (bb.GetOverlap(trackerBoundingBox) < bigOverlap && relativeSimilarities[bb] > relativeSimilarities[trackerBoundingBox])
                {
                    bbs.Add(bb);
                }
            }

            // if there is only one such bounding box,
            // reinitialize the tracker
            if (bbs.Count == 1)
            {
                output = bbs[0];
                reinitializeTracker = true;
                // NOTE(review): currValid may still become true in the
                // output != null block below when prevValid holds — confirm
                // this override is intended after a re-initialization.
                currValid = false;
            }
            // otherwise calculate weighted average with close detections
            else
            {
                // trackerRepeat weights the tracker box in the average
                // (currently weight 1, i.e. equal weight with each detection).
                int trackerRepeat = 1;
                PointF center = trackerBoundingBox.Center.Multiply(trackerRepeat, trackerRepeat);
                SizeF size = trackerBoundingBox.Size.Multiply(trackerRepeat, trackerRepeat);

                // consider detector bounding boxes that are close to the tracker
                int bbCount = trackerRepeat;
                foreach (IBoundingBox bb in detectorBoundingBoxes)
                {
                    if (bb.GetOverlap(trackerBoundingBox) >= bigOverlap)
                    {
                        center = center.Add(bb.Center);
                        size = size.Add(bb.Size);
                        bbCount++;
                    }
                }

                // Average of the tracker box and all close detections.
                output = trackerBoundingBox.CreateInstance(
                    center.Divide(bbCount, bbCount),
                    size.Divide(bbCount, bbCount)
                    );
            }
        }
    }
    // if tracker is not defined
    else
    {
        if (detectorBoundingBoxes != null)
        {
            List <IBoundingBox> suppressedBbs = Service.NonMaximalBoundingBoxSuppress(detectorBoundingBoxes);

            // if there is a single detection, reinitialize the tracker
            if (suppressedBbs.Count == 1)
            {
                output = suppressedBbs[0];
                reinitializeTracker = true;
                currValid = false;
            }
        }
    }

    // Decide validity of the fused output.
    if (output != null)
    {
        // Validity is sticky: once valid, stay valid while an output exists.
        if (prevValid == true)
        {
            currValid = true;
        }
        else
        {
            // Re-establish validity only when the output patch is confidently
            // similar to the object model (conservative similarity check).
            Image <Gray, byte> outputPatch = frame.GetPatch(output.Center, Size.Round(output.Size))
                                             .Resize(_objectModel.PatchSize.Width, _objectModel.PatchSize.Height, INTER.CV_INTER_LINEAR);

            float pnn, nnn;
            float conservativeSimilarity = _objectModel.ConservativeSimilarity(outputPatch, out pnn, out nnn);

            if (pnn > 0.8f && conservativeSimilarity >= ConsSimValidThreshold)
            {
                currValid = true;
            }
        }
    }

    return(output);
}