/// <summary>
/// Implemented from MLAnalyzer.IMLTransactor interface.
/// Draws every detected object on the overlay and, when a food object is
/// recognized, stops further detection and asks the activity to stop preview.
/// </summary>
/// <param name="result">Analyzer output containing the detected object list.</param>
public void TransactResult(MLAnalyzer.Result result)
{
    // Detection has been turned off (e.g. while a previous hit is being handled).
    if (!liveObjectAnalyseActivity.mlsNeedToDetect)
    {
        return;
    }
    this.liveObjectAnalyseActivity.mOverlay.Clear();
    SparseArray objectSparseArray = result.AnalyseList;
    for (int i = 0; i < objectSparseArray.Size(); i++)
    {
        MLObjectGraphic graphic = new MLObjectGraphic(liveObjectAnalyseActivity.mOverlay, (MLObject)objectSparseArray.ValueAt(i));
        liveObjectAnalyseActivity.mOverlay.Add(graphic);
    }
    // When you need to implement a scene that stops after recognizing specific content
    // and continues to recognize after finishing processing, refer to this code.
    for (int i = 0; i < objectSparseArray.Size(); i++)
    {
        if (((MLObject)objectSparseArray.ValueAt(i)).TypeIdentity == MLObject.TypeFood)
        {
            // BUG FIX: the flag must be cleared to stop detection. The original
            // code assigned `true`, which is a no-op — the guard at the top of
            // this method only lets us get here while the flag is already true,
            // so detection never actually stopped.
            liveObjectAnalyseActivity.mlsNeedToDetect = false;
            liveObjectAnalyseActivity.mHandler.SendEmptyMessage(LiveObjectAnalyseActivity.StopPreview);
            // One stop request is enough; don't post duplicate messages.
            break;
        }
    }
}
/// <summary>
/// Implemented from MLAnalyzer.IMLTransactor interface.
/// Renders the first image-segmentation result of this frame onto the overlay.
/// </summary>
/// <param name="result">Analyzer output containing the segmentation result list.</param>
public void TransactResult(MLAnalyzer.Result result)
{
    this.imgLiveAnalyseActivity.mOverlay.Clear();
    SparseArray imageSegmentationResult = result.AnalyseList;
    // BUG FIX: guard against an empty analyzer result. ValueAt(0) on an empty
    // SparseArray throws, which would crash the live-preview callback on any
    // frame that produced no segmentation output.
    if (imageSegmentationResult.Size() == 0)
    {
        return;
    }
    MLSegmentGraphic graphic = new MLSegmentGraphic(
        this.imgLiveAnalyseActivity.mPreview,
        this.imgLiveAnalyseActivity.mOverlay,
        (MLImageSegmentation)imageSegmentationResult.ValueAt(0),
        this.imgLiveAnalyseActivity.isFront);
    this.imgLiveAnalyseActivity.mOverlay.Add(graphic);
}
/// <summary>
/// Implemented from MLAnalyzer.IMLTransactor interface.
/// Clears the overlay and draws one graphic per detected face in this frame.
/// </summary>
/// <param name="result">Analyzer output containing the detected face list.</param>
public void TransactResult(MLAnalyzer.Result result)
{
    this.mGraphicOverlay.Clear();
    SparseArray faces = result.AnalyseList;
    int faceCount = faces.Size();
    for (int index = 0; index < faceCount; index++)
    {
        MLFace face = (MLFace)faces.ValueAt(index);
        this.mGraphicOverlay.Add(new MLFaceGraphic(this.mGraphicOverlay, face));
    }
}
/// <summary>
/// Implemented from MLAnalyzer.IMLTransactor interface.
/// Collects all scene-detection results from this frame, draws them as a single
/// graphic, and forces the overlay to repaint.
/// </summary>
/// <param name="result">Analyzer output containing the scene-detection list.</param>
public void TransactResult(MLAnalyzer.Result result)
{
    mOverlay.Clear();
    SparseArray sceneResults = result.AnalyseList;
    int resultCount = sceneResults.Size();
    IList<MLSceneDetection> scenes = new List<MLSceneDetection>(resultCount);
    for (int index = 0; index < resultCount; index++)
    {
        scenes.Add((MLSceneDetection)sceneResults.ValueAt(index));
    }
    mOverlay.Add(new MLSceneDetectionGraphic(mOverlay, scenes));
    // Repaint off the UI thread; this callback runs on the analyzer thread.
    mOverlay.PostInvalidate();
}
/// <summary>
/// Implemented from MLAnalyzer.IMLTransactor interface.
/// Gathers every hand-keypoint result from this frame and draws them on the
/// overlay as one combined graphic.
/// </summary>
/// <param name="result">Analyzer output containing the hand-keypoint list.</param>
public void TransactResult(MLAnalyzer.Result result)
{
    this.mGraphicOverlay.Clear();
    SparseArray keypointResults = result.AnalyseList;
    int resultCount = keypointResults.Size();
    List<MLHandKeypoints> hands = new List<MLHandKeypoints>(resultCount);
    for (int index = 0; index < resultCount; index++)
    {
        hands.Add((MLHandKeypoints)keypointResults.ValueAt(index));
    }
    this.mGraphicOverlay.Add(new HandKeypointGraphic(this.mGraphicOverlay, hands));
}
/// <summary>
/// Implemented from MLAnalyzer.IMLTransactor interface.
/// Converts this frame's skeleton results into a graphic after filtering out
/// invalid points, then forwards the valid skeletons for similarity comparison.
/// </summary>
/// <param name="result">Analyzer output containing the detected skeleton list.</param>
public void TransactResult(MLAnalyzer.Result result)
{
    this.mGraphicOverlay.Clear();
    SparseArray skeletonResults = result.AnalyseList;
    int resultCount = skeletonResults.Size();
    List<MLSkeleton> detected = new List<MLSkeleton>(resultCount);
    for (int index = 0; index < resultCount; index++)
    {
        detected.Add((MLSkeleton)skeletonResults.ValueAt(index));
    }
    // Remove invalid point.
    List<MLSkeleton> validSkeletons = SkeletonUtils.GetValidSkeletons(detected);
    this.mGraphicOverlay.Add(new SkeletonGraphic(this.mGraphicOverlay, validSkeletons));
    // Only compare while the activity is still alive; this callback can fire
    // after the activity has started finishing.
    if (mActivity != null && !mActivity.IsFinishing)
    {
        mActivity.CompareSimilarity(validSkeletons);
    }
}