/// <summary>
/// Event handler: refreshes this instance's face-rectangle queue from the
/// latest processed frame, if the frame carries rectangles keyed to this instance.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Frame data; <c>RectangleDictionary</c> maps a target object to its rectangles.</param>
public void UpdateRectangles(object sender, FrameProcessedEventArgs e)
{
    // TryGetValue avoids the double dictionary lookup of ContainsKey + indexer.
    if (e.RectangleDictionary.TryGetValue(this, out var rectangles))
    {
        // Replace the queue wholesale rather than mutating it, so readers
        // always see a consistent snapshot of one frame's rectangles.
        faceRectangles = new ConcurrentQueue<Rectangle>(rectangles);
    }
}
/// <summary>
/// Raises the <c>OnFrameProcessed</c> event with the supplied arguments.
/// </summary>
/// <param name="e">Arguments describing the processed frame.</param>
internal virtual void FrameProcessed(FrameProcessedEventArgs e)
{
    // Copy the delegate to a local first so a concurrent unsubscribe cannot
    // null it between the check and the invocation.
    var handler = OnFrameProcessed;
    handler?.Invoke(this, e);
}
/// <summary>
/// Frame-processed handler: reads the first detected face and updates all
/// smoothed tracking values (body position, head angle, gaze, expressions).
/// Sets <c>Ready</c> once a face has been detected at least once.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="args">Frame event arguments (unused; values are pulled from <c>FaceData</c>).</param>
void FaceModule_FrameProcessed(object sender, FrameProcessedEventArgs args)
{
    FaceData.Update();
    // Only the first detected face (index 0) is tracked.
    var face = FaceData.QueryFaceByIndex(0);
    if (face != null)
    {
        // Raw detection values
        FaceRect = face.Detection.BoundingRect;
        Landmark = face.Landmarks.Points;
        FaceExp = face.Expressions.ExpressionResults;
        // Body position
        BodyPos = SmoothBody.SmoothValue(GetBodyPos(FaceRect));
        // Head angle
        HeadAng = SmoothHead.SmoothValue(GetHeadAng(Landmark));
        // Gaze / eye position
        EyesPos = SmoothEyes.SmoothValue(GetEyesPos(Landmark));
        // Eye blink: both lids are driven by the more-closed eye.
        float eyeL = FaceExp[FaceExpression.EXPRESSION_EYES_CLOSED_LEFT].intensity;
        float eyeR = FaceExp[FaceExpression.EXPRESSION_EYES_CLOSED_RIGHT].intensity;
        EyesClose = SmoothEyesClose.SmoothValue(Mathf.Max(eyeL, eyeR));
        // Dead-zone below 50, then rescale the 50..100 band to 0..100
        // (presumably intensity is on a 0..100 scale — TODO confirm against SDK docs).
        EyesClose = EyesClose < 50 ? 0 : (EyesClose - 50) * 2;
        // Brow raise
        float browRaiL = FaceExp[FaceExpression.EXPRESSION_BROW_RAISER_LEFT].intensity;
        float browRaiR = FaceExp[FaceExpression.EXPRESSION_BROW_RAISER_RIGHT].intensity;
        BrowRai = SmoothBrowRai.SmoothValue(Mathf.Max(browRaiL, browRaiR));
        // Brow lower
        float browLowL = FaceExp[FaceExpression.EXPRESSION_BROW_LOWERER_LEFT].intensity;
        float browLowR = FaceExp[FaceExpression.EXPRESSION_BROW_LOWERER_RIGHT].intensity;
        BrowLow = SmoothBrowLow.SmoothValue(Mathf.Max(browLowL, browLowR));
        // Smile
        Smile = SmoothSmile.SmoothValue(FaceExp[FaceExpression.EXPRESSION_SMILE].intensity);
        // Kiss (slightly conflicts with mouth-open)
        Kiss = SmoothKiss.SmoothValue(FaceExp[FaceExpression.EXPRESSION_KISS].intensity);
        // Mouth open (slightly conflicts with kiss)
        Mouth = SmoothMouth.SmoothValue(FaceExp[FaceExpression.EXPRESSION_MOUTH_OPEN].intensity);
        // Tongue out (conflicts with mouth-open)
        Tongue = SmoothTongue.SmoothValue(FaceExp[FaceExpression.EXPRESSION_TONGUE_OUT].intensity);
        // NOTE(review): Ready is never reset here when no face is found —
        // it latches true after the first successful detection.
        Ready = true;
    }
}
/// <summary>
/// Frame-processed callback: pulls the segmented image from the sender,
/// logs its dimensions, and uploads it to the native segmentation texture.
/// </summary>
/// <param name="sender">Expected to be the <c>Seg3D</c> module that raised the event.</param>
/// <param name="args">Frame event arguments (unused).</param>
void OnFrameProcessed(System.Object sender, FrameProcessedEventArgs args)
{
    // Pattern match instead of a direct cast: the original's (Seg3D)sender
    // would throw InvalidCastException for a non-Seg3D sender before its
    // null check ever ran, defeating the guard.
    if (sender is Seg3D s)
    {
        Image image = s.AcquireSegmentedImage();
        if (image != null)
        {
            Debug.Log("Color = " + image.Info.width + "x" + image.Info.height);
            texPlugin.UpdateTextureNative(image, segTex2DPtr);
        }
        else
        {
            Debug.Log("Image is null.");
        }
    }
}
/// <summary>
/// Worker entry point: runs the native analyzer over one camera frame,
/// packages the detected peaks into event args, optionally stores the
/// analyzer's output image, and raises the frame-processed event.
/// </summary>
/// <param name="data">Boxed <c>FrameProcessorData</c> carrying the frame bytes, size, and timestamp.</param>
private void FrameProcessor(Object data)
{
    var frameData = (FrameProcessorData)data;

    int width = (int)frameData.PreviewSize.Width;
    int height = (int)frameData.PreviewSize.Height;
    // Wrap the raw byte[] in an IBuffer for the native analyzer.
    IBuffer frame = Windows.Security.Cryptography.CryptographicBuffer.CreateFromByteArray(frameData.Frame);

    var peaks = new JsonArray();
    byte[] outPixels = _nativeAnalyzer.Analyze(width, height, frame, peaks, _settings.KeepImages);

    var args = new FrameProcessedEventArgs
    {
        Peaks = peaks,
        Timestamp = frameData.Timestamp
    };

    if (_settings.KeepImages)
    {
        // Zero-pad the timestamp so stored image filenames sort lexically.
        string filename = frameData.Timestamp.ToString().PadLeft(5, '0');
        _images.Add(new Helper.ImageInfo(filename, outPixels, width, height));
    }

    FrameProcessed(args);
}
/// <summary>
/// Frame-processed callback: pulls the segmented image from the sender,
/// uploads it to the color texture, then disposes the sender.
/// </summary>
/// <param name="sender">Expected to be the <c>Seg3D</c> module that raised the event.</param>
/// <param name="args">Frame event arguments (unused).</param>
void OnFrameProcessed(System.Object sender, FrameProcessedEventArgs args)
{
    // 'as' instead of a direct cast: the original's (Seg3D)sender would throw
    // InvalidCastException for a non-Seg3D sender before the null check ran.
    Seg3D s = sender as Seg3D;
    if (s == null)
    {
        Debug.Log("Null");
    }
    else
    {
        Image image = s.AcquireSegmentedImage();
        if (image != null)
        {
            texPlugin.UpdateTextureNative(image, colorTex2DPtr);
        }
        else
        {
            Debug.Log("Image is null.");
        }
        // Dispose only on the non-null path: the original called s.Dispose()
        // unconditionally after the if/else, so a null sender logged "Null"
        // and then immediately threw NullReferenceException.
        s.Dispose();
    }
}
/// <summary>
/// Event handler: forwards the encapsulated frame from the event args to the
/// next stage in the stack.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="args">Carries the processed frame to forward.</param>
void Stack_FrameEncapsulated(object sender, FrameProcessedEventArgs args)
{
    var processedFrame = args.ProcessedFrame;
    NotifyNext(processedFrame);
}