public void TestOutputRect()
        {
            int            hr    = 0;
            NormalizedRect rect1 = new NormalizedRect(0.0f, 0.0f, 0.5f, 0.5f);
            NormalizedRect rect2 = new NormalizedRect();

            // Try to configure stream 0 to use upper left quarter screen
            hr = vmrMixerControl.SetOutputRect(0, ref rect1);
            DsError.ThrowExceptionForHR(hr);

            hr = vmrMixerControl.GetOutputRect(0, out rect2);
            DsError.ThrowExceptionForHR(hr);

            Debug.Assert(rect1 == rect2, "IVMRMixerControl.GetOutputRect / SetOutputRect");

            rect1 = new NormalizedRect(0.5f, 0.5f, 1.0f, 1.0f);

            // Try to configure stream 3 to use lower right quarter screen
            hr = vmrMixerControl.SetOutputRect(3, ref rect1);
            DsError.ThrowExceptionForHR(hr);

            hr = vmrMixerControl.GetOutputRect(3, out rect2);
            DsError.ThrowExceptionForHR(hr);

            Debug.Assert(rect1 == rect2, "IVMRMixerControl.GetOutputRect / SetOutputRect");
        }
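The round-trip above assumes the Video Mixing Renderer has already been put into mixing mode with enough input pins for streams 0 and 3; otherwise SetOutputRect fails. A minimal setup sketch, assuming a vmr field that holds the renderer's IBaseFilter (not shown in the test above):

    // Sketch only: `vmr` is assumed to be the Video Mixing Renderer filter already added to the graph.
    IVMRFilterConfig filterConfig = (IVMRFilterConfig)vmr;

    // Request four input pins so stream IDs 0..3 are valid for SetOutputRect/GetOutputRect.
    int hr = filterConfig.SetNumberOfStreams(4);
    DsError.ThrowExceptionForHR(hr);

    // The mixer control exercised by the test is queried from the same filter.
    vmrMixerControl = (IVMRMixerControl)vmr;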
 public HandTrackingValue(ClassificationList classificationList, NormalizedLandmarkList landmarkList, NormalizedRect rect, List <Detection> detections)
 {
     Handedness       = classificationList;
     HandLandmarkList = landmarkList;
     HandRect         = rect;
     PalmDetections   = detections;
 }
Example #3
        public void ZoomToRect(Rectangle zoomRect)
        {
            if ((zoomRect.Height == 0) || (zoomRect.Width == 0))
            {
                throw new Exception("ZoomRect has zero size.");
            }
            IVMRMixerControl9 vMRenderer = (IVMRMixerControl9)this.DX.VMRenderer;

            if (vMRenderer == null)
            {
                throw new Exception("The Mixer control is not created.");
            }
            float num  = ((float)this._Resolution.Width) / ((float)zoomRect.Width);
            float num2 = ((float)this._Resolution.Height) / ((float)zoomRect.Height);

            NormalizedRect rect = new NormalizedRect(
                -((float)zoomRect.Left) * num,
                -((float)zoomRect.Top) * num2,
                (-((float)zoomRect.Right) * num) + (this._Resolution.Width * (num + 1f)),
                (-((float)zoomRect.Bottom) * num2) + (this._Resolution.Height * (num2 + 1f)));

            // Normalize the zoom rectangle to the 0..1 composition space expected by the mixer.
            rect.left   /= (float)this._Resolution.Width;
            rect.right  /= (float)this._Resolution.Width;
            rect.top    /= (float)this._Resolution.Height;
            rect.bottom /= (float)this._Resolution.Height;

            vMRenderer.SetOutputRect(0, ref rect);
        }
Example #4
        private NormalizedRect GetDestRectangle()
        {
            int            hr = 0;
            int            width, height, arW, arH;
            NormalizedRect rect = new NormalizedRect();

            hr = windowlessCtrl.GetNativeVideoSize(out width, out height, out arW, out arH);
            DsError.ThrowExceptionForHR(hr);

            // Position the bitmap in the middle of the video stream.
            if (width >= height)
            {
                rect.top    = 0.0f;
                rect.left   = (1.0f - ((float)height / (float)width)) / 2;
                rect.bottom = 1.0f;
                rect.right  = rect.left + (float)height / (float)width;
            }
            else
            {
                rect.top    = (1.0f - ((float)width / (float)height)) / 2;
                rect.left   = 0.0f;
                rect.right  = 1.0f;
                rect.bottom = rect.top + (float)width / (float)height;
            }

            return(rect);
        }
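As a worked check of the arithmetic (not part of the original sample): a 640x480 source falls into the width >= height branch, so the helper returns a centered square spanning the full frame height.

    // 640x480 source: ratio = 480/640 = 0.75, so the destination is
    // left = 0.125, top = 0.0, right = 0.875, bottom = 1.0.
    float ratio = 480f / 640f;
    NormalizedRect dest = new NormalizedRect((1.0f - ratio) / 2, 0.0f, (1.0f - ratio) / 2 + ratio, 1.0f);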
Example #5
 public void Draw(NormalizedRect target)
 {
     if (ActivateFor(target))
     {
         Draw(GetScreenRect().GetRectVertices(target, rotationAngle, isMirrored));
     }
 }
Example #6
 public PoseTrackingValue(Detection poseDetection, NormalizedLandmarkList poseLandmarks, LandmarkList poseWorldLandmarks, NormalizedRect roiFromLandmarks)
 {
     this.poseDetection      = poseDetection;
     this.poseLandmarks      = poseLandmarks;
     this.poseWorldLandmarks = poseWorldLandmarks;
     this.roiFromLandmarks   = roiFromLandmarks;
 }
Example #7
 public void Draw(Transform screenTransform, ClassificationList handedness, NormalizedLandmarkList handLandmarkList,
                  NormalizedRect handRect, List <Detection> palmDetections, bool isFlipped = false)
 {
     handednessAnnotation.GetComponent <ClassificationAnnotationController>().Draw(screenTransform, handedness);
     handLandmarkListAnnotation.GetComponent <HandLandmarkListAnnotationController>().Draw(screenTransform, handLandmarkList, isFlipped);
     handRectAnnotation.GetComponent <RectAnnotationController>().Draw(screenTransform, handRect, isFlipped);
     palmDetectionsAnnotation.GetComponent <DetectionListAnnotationController>().Draw(screenTransform, palmDetections, isFlipped);
 }
 public HolisticValue(NormalizedLandmarkList PoseLandmarks, NormalizedRect PoseRoi, Detection PoseDetection,
                      NormalizedLandmarkList FaceLandmarks, NormalizedLandmarkList LeftHandLandmarks, NormalizedLandmarkList RightHandLandmarks)
 {
     this.PoseLandmarks      = PoseLandmarks;
     this.PoseRoi            = PoseRoi;
     this.PoseDetection      = PoseDetection;
     this.FaceLandmarks      = FaceLandmarks;
     this.LeftHandLandmarks  = LeftHandLandmarks;
     this.RightHandLandmarks = RightHandLandmarks;
 }
Example #9
 public HolisticTrackingValue(Detection poseDetection, NormalizedLandmarkList poseLandmarks,
                              NormalizedLandmarkList faceLandmarks, NormalizedLandmarkList leftHandLandmarks, NormalizedLandmarkList rightHandLandmarks,
                              NormalizedLandmarkList leftIrisLandmarks, NormalizedLandmarkList rightIrisLandmarks,
                              LandmarkList poseWorldLandmarks, NormalizedRect poseRoi)
 {
     this.poseDetection      = poseDetection;
     this.poseLandmarks      = poseLandmarks;
     this.faceLandmarks      = faceLandmarks;
     this.leftIrisLandmarks  = leftIrisLandmarks;
     this.rightIrisLandmarks = rightIrisLandmarks;
     this.leftHandLandmarks  = leftHandLandmarks;
     this.rightHandLandmarks = rightHandLandmarks;
     this.poseWorldLandmarks = poseWorldLandmarks;
     this.poseRoi            = poseRoi;
 }
        protected override IEnumerator WaitForNextValue()
        {
            List <Detection>       faceDetections        = null;
            NormalizedRect         faceRect              = null;
            NormalizedLandmarkList faceLandmarksWithIris = null;

            if (runningMode == RunningMode.Sync)
            {
                var _ = graphRunner.TryGetNext(out faceDetections, out faceRect, out faceLandmarksWithIris, true);
            }
            else if (runningMode == RunningMode.NonBlockingSync)
            {
                yield return(new WaitUntil(() => graphRunner.TryGetNext(out faceDetections, out faceRect, out faceLandmarksWithIris, false)));
            }

            _faceDetectionsAnnotationController.DrawNow(faceDetections);
            _faceRectAnnotationController.DrawNow(faceRect);
            _faceLandmarksWithIrisAnnotationController.DrawNow(faceLandmarksWithIris);
        }
Example #11
        //public Bitmap GenerateAlphaBitmap()
        //{
        //    // Alpha values
        //    int alpha50 = (int)(255 * 0.50f); // 50% opacity

        //    // Some drawing tools needed later
        //    Pen blackBorder = new Pen(Color.Black, 2.0f);
        //    Brush red50 = new SolidBrush(Color.FromArgb(alpha50, Color.Red));
        //    Font font = new Font("Tahoma", 16);

        //    int w = _Camera.OutputVideoSize.Width;
        //    int h = _Camera.OutputVideoSize.Height;

        //    // Create a ARGB bitmap
        //    Bitmap bmp = new Bitmap(w, h, PixelFormat.Format32bppArgb);
        //    Graphics g = Graphics.FromImage(bmp);

        //    // Do antialiased drawings
        //    g.SmoothingMode = SmoothingMode.AntiAlias;
        //    g.TextRenderingHint = TextRenderingHint.AntiAlias;

        //    // Clear the bitmap with complete transparency
        //    g.Clear(Color.Transparent);

        //    // Draw a red circle (50% opacity) with black border in the middle
        //    //g.FillEllipse(green, 320 * w / 640, 240 * h / 480, 155 * w / 640, 155 * h / 480);
        //    g.FillEllipse(red50, w / 2 - 70, h / 2 - 70, 140, 140);
        //    g.DrawEllipse(blackBorder, w / 2 - 70, h / 2 - 70, 140, 140);


        //    // Release GDI+ objects
        //    blackBorder.Dispose();
        //    red50.Dispose();
        //    g.Dispose();

        //    // return the bitmap
        //    return bmp;
        //}
        #endregion

        // Switch GDI/D3D
        //private void button3_Click(object sender, EventArgs e)
        //{
        //    cameraControl.MixerEnabled = false;

        //    cameraControl.UseGDI = !cameraControl.UseGDI;
        //    UpdateCameraBitmap();

        //    if (!cameraControl.MixerEnabled)
        //        cameraControl.MixerEnabled = true;
        //}

        #endregion

        #region Mouse selection stuff



        private void cameraControl_MouseDown(object sender, MouseEventArgs e)
        {
            if (e.Button != MouseButtons.Left)
            {
                return;
            }

            if (!cameraControl.CameraCreated)
            {
                return;
            }

            if (_bZoomed)
            {
                return;
            }

            PointF point = cameraControl.ConvertWinToNorm(new PointF(e.X, e.Y));

            _MouseSelectionRect = new NormalizedRect(point.X, point.Y, point.X, point.Y);

            _bDrawMouseSelection = true;
            UpdateCameraBitmap();
        }
        private void cameraControl_MouseDown(object sender, MouseEventArgs e)
        {
            if (e.Button != MouseButtons.Left)
                return;

            if (!cameraControl.CameraCreated)
                return;

            if (_bZoomed)
                return;

            PointF point = cameraControl.ConvertWinToNorm(new PointF(e.X, e.Y));
            _MouseSelectionRect = new NormalizedRect(point.X, point.Y, point.X, point.Y);

            _bDrawMouseSelection = true;
            UpdateCameraBitmap();
        }
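Neither handler shows how the selection is completed while dragging; a hypothetical companion MouseMove handler (not taken from either project), assuming the same _MouseSelectionRect, _bDrawMouseSelection and UpdateCameraBitmap members, could extend the normalized rectangle like this:

    // Hypothetical sketch: grow the normalized selection rectangle while the left button is held.
    private void cameraControl_MouseMove(object sender, MouseEventArgs e)
    {
        if (!_bDrawMouseSelection)
        {
            return;
        }

        PointF point = cameraControl.ConvertWinToNorm(new PointF(e.X, e.Y));

        _MouseSelectionRect.right  = point.X;
        _MouseSelectionRect.bottom = point.Y;

        UpdateCameraBitmap();
    }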
Example #13
 /// <summary>
 ///   Get a Vector3 array which represents <paramref name="normalizedRect" />'s vertex coordinates in the local coordinate system.
 ///   They are ordered clockwise from the bottom-left point.
 /// </summary>
 /// <param name="rectTransform">
 ///   <see cref="RectTransform" /> to be used for calculating local coordinates
 /// </param>
 /// <param name="imageRotation">Counterclockwise rotation angle of the input image</param>
 ///   <param name="isMirrored">Set to true if the original coordinates are mirrored</param>
 public static Vector3[] GetRectVertices(this RectTransform rectTransform, NormalizedRect normalizedRect, RotationAngle imageRotation = RotationAngle.Rotation0, bool isMirrored = false)
 {
     return(GetRotatedRectVerticesNormalized(rectTransform, normalizedRect.XCenter, normalizedRect.YCenter, normalizedRect.Width, normalizedRect.Height, normalizedRect.Rotation, imageRotation, isMirrored));
 }
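A typical consumer of these vertices is a closed LineRenderer loop that outlines the detected region; a small usage sketch, where annotationTransform, lineRenderer and rect are assumed to exist in the scene:

    // Sketch: outline a NormalizedRect with a LineRenderer in the RectTransform's local space.
    Vector3[] corners = annotationTransform.GetRectVertices(rect, RotationAngle.Rotation0, isMirrored: false);

    lineRenderer.useWorldSpace = false;          // positions are local coordinates
    lineRenderer.loop          = true;           // close the rectangle
    lineRenderer.positionCount = corners.Length;
    lineRenderer.SetPositions(corners);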
Example #14
 public void DrawNow(NormalizedRect target)
 {
     currentTarget = target;
     SyncNow();
 }
Example #15
 void OnFaceRectOutput(NormalizedRect faceRect)
 {
     faceRectAnnotationController.DrawLater(faceRect);
 }
Example #16
        private void BuildMosaicGraph(ITuningSpace tuningSpace, ArrayList programs)
        {
            this.graphBuilder = (IFilterGraph2)new FilterGraph();
            rot = new DsROTEntry(this.graphBuilder);

            // Method names should be self explanatory
            AddNetworkProviderFilter(tuningSpace);
            AddMPEG2DemuxFilter();

            AddAndConnectBDABoardFilters();
            AddTransportStreamFiltersToGraph();
            AddRenderers();

            //unsafe
            //{
                IntPtr formatPtr = Marshal.AllocHGlobal(g_Mpeg2ProgramVideo.Length);
                Marshal.Copy(g_Mpeg2ProgramVideo, 0, formatPtr, g_Mpeg2ProgramVideo.Length);

                IMpeg2Demultiplexer mpeg2Demultiplexer = this.mpeg2Demux as IMpeg2Demultiplexer;

                for(int p = 1; p < programs.Count; p++)
                {
                    PSI.PSIPMT pmt = (PSI.PSIPMT)programs[p];
                    PSI.PSIPMT.Data stream = (PSI.PSIPMT.Data)pmt.GetStreamByType(CodeTV.PSI.STREAM_TYPES.STREAMTYPE_13818_VIDEO);

                    AMMediaType mediaType = new AMMediaType();
                    mediaType.majorType = MediaType.Video;
                    mediaType.subType = MediaSubType.Mpeg2Video;
                    mediaType.fixedSizeSamples = false;
                    mediaType.temporalCompression = false;
                    mediaType.sampleSize = 0;
                    mediaType.formatType = FormatType.Mpeg2Video;
                    mediaType.unkPtr = IntPtr.Zero;

                    mediaType.formatSize = g_Mpeg2ProgramVideo.Length;
                    mediaType.formatPtr = formatPtr;

                    //mediaType.formatType = FormatType.Mpeg2Video;
                    //mediaType.formatSize = 0;
                    //mediaType.formatPtr = IntPtr.Zero;

                    string pinName = "video" + p;
                    IPin outputPin;
                    int hr = mpeg2Demultiplexer.CreateOutputPin(mediaType, pinName, out outputPin);
                    if (outputPin != null)
                    {
                        IMPEG2PIDMap mpeg2PIDMap = outputPin as IMPEG2PIDMap;
                        if (mpeg2PIDMap != null)
                            hr = mpeg2PIDMap.MapPID(1, new int[] { stream.Pid }, MediaSampleContent.ElementaryStream);
                        Marshal.ReleaseComObject(outputPin);
                    }
                }
                Marshal.FreeHGlobal(formatPtr);
            //}

            ConfigureVMR9InWindowlessMode(programs.Count);
            ConnectAllOutputFilters();

            int numberColumn = 4;
            int numberRow = 4;
            float widthPadding = 0.01f;
            float heightPadding = 0.01f;

            float width = (1.0f / numberColumn) - 2.0f * widthPadding;
            float height = (1.0f / numberRow) - 2.0f * heightPadding;

            IVMRMixerControl9 vmrMixerControl9 = this.videoRenderer as IVMRMixerControl9;
            for (int p = 1; p < programs.Count; p++)
            {
                int column, row = Math.DivRem(p - 1, numberColumn, out column);
                NormalizedRect rect = new NormalizedRect();
                rect.left = (float)column / (float)numberColumn + widthPadding;
                rect.top = (float)row / (float)numberRow + heightPadding;
                rect.right = rect.left + width;
                rect.bottom = rect.top + height;
                vmrMixerControl9.SetOutputRect(p, ref rect);
            }
        }
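As a worked example of the layout math above, the first mixed stream (p = 1) gets column 0, row 0, a cell size of 1/4 - 2 * 0.01 = 0.23, and therefore the top-left cell of the 4x4 mosaic:

    // p = 1: left = 0/4 + 0.01 = 0.01, top = 0.01, right = 0.24, bottom = 0.24.
    NormalizedRect firstCell = new NormalizedRect(0.01f, 0.01f, 0.24f, 0.24f);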
Example #17
 public void DrawLater(NormalizedRect target)
 {
     UpdateCurrentTarget(target, ref currentTarget);
 }
 public IrisTrackingValue(NormalizedLandmarkList landmarkList, NormalizedRect rect) : this(landmarkList, rect, new List <Detection>())
 {
 }
Example #19
        private void BuildMosaicGraph(ITuningSpace tuningSpace, ArrayList programs)
        {
            this.graphBuilder = (IFilterGraph2) new FilterGraph();
            rot = new DsROTEntry(this.graphBuilder);

            // Method names should be self explanatory
            AddNetworkProviderFilter(tuningSpace);
            AddMPEG2DemuxFilter();

            AddAndConnectBDABoardFilters();
            AddTransportStreamFiltersToGraph();
            AddRenderers();

            //unsafe
            //{
            IntPtr formatPtr = Marshal.AllocHGlobal(g_Mpeg2ProgramVideo.Length);

            Marshal.Copy(g_Mpeg2ProgramVideo, 0, formatPtr, g_Mpeg2ProgramVideo.Length);

            IMpeg2Demultiplexer mpeg2Demultiplexer = this.mpeg2Demux as IMpeg2Demultiplexer;

            for (int p = 1; p < programs.Count; p++)
            {
                PSI.PSIPMT      pmt    = (PSI.PSIPMT)programs[p];
                PSI.PSIPMT.Data stream = (PSI.PSIPMT.Data)pmt.GetStreamByType(CodeTV.PSI.STREAM_TYPES.STREAMTYPE_13818_VIDEO);

                AMMediaType mediaType = new AMMediaType();
                mediaType.majorType           = MediaType.Video;
                mediaType.subType             = MediaSubType.Mpeg2Video;
                mediaType.fixedSizeSamples    = false;
                mediaType.temporalCompression = false;
                mediaType.sampleSize          = 0;
                mediaType.formatType          = FormatType.Mpeg2Video;
                mediaType.unkPtr = IntPtr.Zero;

                mediaType.formatSize = g_Mpeg2ProgramVideo.Length;
                mediaType.formatPtr  = formatPtr;

                //mediaType.formatType = FormatType.Mpeg2Video;
                //mediaType.formatSize = 0;
                //mediaType.formatPtr = IntPtr.Zero;

                string pinName = "video" + p;
                IPin   outputPin;
                int    hr = mpeg2Demultiplexer.CreateOutputPin(mediaType, pinName, out outputPin);
                if (outputPin != null)
                {
                    IMPEG2PIDMap mpeg2PIDMap = outputPin as IMPEG2PIDMap;
                    if (mpeg2PIDMap != null)
                    {
                        hr = mpeg2PIDMap.MapPID(1, new int[] { stream.Pid }, MediaSampleContent.ElementaryStream);
                    }
                    Marshal.ReleaseComObject(outputPin);
                }
            }
            Marshal.FreeHGlobal(formatPtr);
            //}

            ConfigureVMR9InWindowlessMode(programs.Count);
            ConnectAllOutputFilters();

            int   numberColumn  = 4;
            int   numberRow     = 4;
            float widthPadding  = 0.01f;
            float heightPadding = 0.01f;

            float width  = (1.0f / numberColumn) - 2.0f * widthPadding;
            float height = (1.0f / numberRow) - 2.0f * heightPadding;

            IVMRMixerControl9 vmrMixerControl9 = this.videoRenderer as IVMRMixerControl9;

            for (int p = 1; p < programs.Count; p++)
            {
                int            column, row = Math.DivRem(p - 1, numberColumn, out column);
                NormalizedRect rect = new NormalizedRect();
                rect.left   = (float)column / (float)numberColumn + widthPadding;
                rect.top    = (float)row / (float)numberRow + heightPadding;
                rect.right  = rect.left + width;
                rect.bottom = rect.top + height;
                vmrMixerControl9.SetOutputRect(p, ref rect);
            }
        }
Example #20
 void OnPoseRoiOutput(NormalizedRect roiFromLandmarks)
 {
     poseRoiAnnotationController.DrawLater(roiFromLandmarks);
 }
Example #21
        public PointF ConvertWinToNorm(PointF point)
        {
            NormalizedRect videoRect = this.GetVideoRect();

            return(new PointF((point.X - videoRect.left) / (videoRect.right - videoRect.left), (point.Y - videoRect.top) / (videoRect.bottom - videoRect.top)));
        }
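The reverse mapping is the same linear interpolation inverted; a hypothetical helper (not present on this page), assuming the same GetVideoRect():

    // Hypothetical inverse of ConvertWinToNorm: map a normalized video point back to window coordinates.
    public PointF ConvertNormToWin(PointF point)
    {
        NormalizedRect videoRect = this.GetVideoRect();

        return new PointF(
            videoRect.left + point.X * (videoRect.right - videoRect.left),
            videoRect.top + point.Y * (videoRect.bottom - videoRect.top));
    }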
Example #22
 public IrisTrackingValue(List <Detection> faceDetections, NormalizedRect faceRect, NormalizedLandmarkList faceLandmarksWithIris)
 {
     this.faceRect              = faceRect;
     this.faceDetections        = faceDetections;
     this.faceLandmarksWithIris = faceLandmarksWithIris;
 }
 /// <summary>
 ///   Get a Vector3 array which represents <paramref name="normalizedRect" />'s vertex coordinates in the local coordinate system.
 ///   They are ordered clockwise from the bottom-left point.
 /// </summary>
 ///   <param name="rectangle">Rectangle to which the normalized coordinates are mapped</param>
 /// <param name="imageRotation">
 ///   Counterclockwise rotation angle of the input image in the image coordinate system.
 ///   In the local coordinate system, this value will often represent a clockwise rotation angle.
 /// </param>
 ///   <param name="isMirrored">Set to true if the original coordinates are mirrored</param>
 public static Vector3[] GetRectVertices(this UnityEngine.Rect rectangle, NormalizedRect normalizedRect,
                                         RotationAngle imageRotation = RotationAngle.Rotation0, bool isMirrored = false)
 {
     return(ImageNormalizedToRectVertices(rectangle, normalizedRect.XCenter, normalizedRect.YCenter, normalizedRect.Width, normalizedRect.Height, normalizedRect.Rotation, imageRotation, isMirrored));
 }
 public IrisTrackingValue(NormalizedLandmarkList landmarkList, NormalizedRect rect, List <Detection> detections)
 {
     FaceLandmarksWithIris = landmarkList;
     FaceRect       = rect;
     FaceDetections = detections;
 }
Example #25
        /// <summary>
        /// Sets camera output rect (zooms to selected rect).
        /// </summary>
        /// <param name="zoomRect">Rectangle for zooming in video coordinates.</param>
        public void ZoomToRect(Rectangle zoomRect)
        {
            if (zoomRect.Height == 0 || zoomRect.Width == 0)
                throw new Exception(@"ZoomRect has zero size.");

            IVMRMixerControl9 pMix = (IVMRMixerControl9)DX.VMRenderer;

            if (pMix == null)
                throw new Exception(@"The Mixer control is not created.");

            float x_scale = (float)_Resolution.Width / zoomRect.Width;
            float y_scale = (float)_Resolution.Height / zoomRect.Height;

            NormalizedRect rect = new NormalizedRect
                (
                -(float)zoomRect.Left * x_scale,
                -(float)zoomRect.Top * y_scale,
                -(float)zoomRect.Right * x_scale + _Resolution.Width * (x_scale + 1),
                -(float)zoomRect.Bottom * y_scale + _Resolution.Height * (y_scale + 1)
                );


            rect.left /= _Resolution.Width;
            rect.right /= _Resolution.Width;
            rect.top /= _Resolution.Height;
            rect.bottom /= _Resolution.Height;

            //NormalizedRect rect = new NormalizedRect(-1, -1, 2, 2);

            //NormalizedRect rect = new NormalizedRect(
            //    (float)zoomRect.Left / _Resolution.Width,
            //    (float)zoomRect.Top / _Resolution.Height,
            //    (float)zoomRect.Right / _Resolution.Width,
            //    (float)zoomRect.Bottom / _Resolution.Height);

            pMix.SetOutputRect(0, ref rect);
        }
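Usage is a single call; for example, zooming into the upper-left quarter of a 640x480 frame (hypothetical values, assuming camera is an instance of the class that defines ZoomToRect):

    // Hypothetical call: zoom the output to the upper-left quarter of a 640x480 frame.
    camera.ZoomToRect(new Rectangle(0, 0, 320, 240));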
Example #26
        void initGraph(Rectangle rect, IntPtr hwnd)
        {
            log.writeLog("Init Graph");
            pGB           = (IGraphBuilder) new FilterGraph();
            pVmr          = (IBaseFilter) new VideoMixingRenderer9();
            compressVideo = CreateFilter(FilterCategory.VideoCompressorCategory, lstCompressor[3]);
            pGB.AddFilter(pVmr, "Video");
            pGB.AddFilter(captureVideo, "VideoCapture");
            //pGB.AddFilter(compressVideo, "Encoder");

            pConfig = (IVMRFilterConfig9)pVmr;
            pConfig.SetRenderingMode(VMR9Mode.Windowless);
            pWC = (IVMRWindowlessControl9)pVmr;

            pWC.SetVideoPosition(null, DsRect.FromRectangle(rect));
            pWC.SetVideoClippingWindow(hwnd);

            pMix = (IVMRMixerControl9)pVmr;
            pMs  = (IMediaSeeking)pGB;
            pMC  = (IMediaControl)pGB;

            ICaptureGraphBuilder2 cc = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

            cc.SetFiltergraph(pGB);
            pGB.AddFilter(devices[0], "Camera-1");
            if (devices[1] != null)
            {
                pGB.AddFilter(devices[1], "Camera-2");
            }
            pGB.AddFilter(audioCapture, "Audio Capture");

            // Split the composition space: stream 0 on the left half, stream 1 on the right half.
            Rectangle win = rect;
            float     _w  = win.Width;
            float     _H  = win.Height;

            NormalizedRect _0rect = new NormalizedRect();

            _0rect.top    = win.Top / _H;
            _0rect.left   = win.Left / _w;
            _0rect.right  = (win.Left + win.Width / 2) / _w;
            _0rect.bottom = win.Bottom / _H;

            NormalizedRect _1rect = new NormalizedRect();

            _1rect.top    = win.Top / _H;
            _1rect.left   = (win.Left + win.Width / 2) / _w;
            _1rect.right  = win.Right / _w;
            _1rect.bottom = win.Bottom / _H;

            pMix.SetOutputRect(0, ref _0rect);
            pMix.SetOutputRect(1, ref _1rect);

            int             hr   = 0;
            IFileSinkFilter sink = null;

            log.writeLog("SetOutputFileName create");
            hr = cc.SetOutputFileName(MediaSubType.Avi, "VideoCaptured.avi", out captureVideo, out sink);
            DsError.ThrowExceptionForHR(hr);
            log.writeLog("SetOutputFileName success");
            log.writeLog("Start create cam-1 to preview");
            hr = cc.RenderStream(PinCategory.Preview, MediaType.Video, devices[0], null, pVmr);
            DsError.ThrowExceptionForHR(hr);
            log.writeLog("Start cam-1 to preview success");
            if (devices[1] != null)
            {
                log.writeLog("Start create cam-2 to preview");
                hr = cc.RenderStream(PinCategory.Preview, MediaType.Video, devices[1], null, pVmr);
                DsError.ThrowExceptionForHR(hr);
                log.writeLog("Create cam-2 to preview success");
            }
            log.writeLog("Start capture video from cam-1");
            hr = cc.RenderStream(PinCategory.Capture, MediaType.Video, devices[0], null, captureVideo);
            DsError.ThrowExceptionForHR(hr);
            log.writeLog("success create capture from cam-1");
            log.writeLog("Start capture audio");
            hr = cc.RenderStream(PinCategory.Capture, MediaType.Audio, audioCapture, null, captureVideo);
            DsError.ThrowExceptionForHR(hr);
            log.writeLog("Success to capture audio");

            Marshal.ReleaseComObject(cc);
        }
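After initGraph returns, the graph still has to be started; a minimal sketch using the IMediaControl reference the method already keeps in pMC:

    // Start preview and capture on the graph built by initGraph.
    int hr = pMC.Run();
    DsError.ThrowExceptionForHR(hr);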
Example #27
        private NormalizedRect GetDestRectangle()
        {
            int hr = 0;
            int width, height, arW, arH;
            NormalizedRect rect = new NormalizedRect();

            hr = windowlessCtrl.GetNativeVideoSize(out width, out height, out arW, out arH);
            DsError.ThrowExceptionForHR(hr);

            // Position the bitmap in the middle of the video stream.
            if (width >= height)
            {
                rect.top = 0.0f;
                rect.left = (1.0f - ((float)height / (float)width)) / 2;
                rect.bottom = 1.0f;
                rect.right = rect.left + (float)height / (float)width;
            }
            else
            {
                rect.top = (1.0f - ((float)width / (float)height)) / 2;
                rect.left = 0.0f;
                rect.right = 1.0f;
                rect.bottom = rect.top + (float)width / (float)height;
            }

            return rect;
        }
 public HandTrackingValue(ClassificationList classificationList, NormalizedLandmarkList landmarkList, NormalizedRect rect) :
     this(classificationList, landmarkList, rect, new List <Detection>())
 {
 }
    public void Draw(Transform screenTransform, NormalizedLandmarkList poseLandmarks, NormalizedRect poseRoi, Detection poseDetection,
                     NormalizedLandmarkList faceLandmarks, NormalizedLandmarkList leftHandLandmarks, NormalizedLandmarkList rightHandLandmarks, bool isFlipped = false)
    {
        poseLandmarksAnnotation.GetComponent <FullBodyPoseLandmarkListAnnotationController>().Draw(screenTransform, poseLandmarks, isFlipped);
        poseRoiAnnotation.GetComponent <RectAnnotationController>().Draw(screenTransform, poseRoi, isFlipped);
        poseDetectionAnnotation.GetComponent <DetectionAnnotationController>().Draw(screenTransform, poseDetection, isFlipped);
        faceLandmarksAnnotation.GetComponent <FaceLandmarkListAnnotationController>().Draw(screenTransform, faceLandmarks, isFlipped);
        leftHandLandmarksAnnotation.GetComponent <HandLandmarkListAnnotationController>().Draw(screenTransform, leftHandLandmarks, isFlipped);
        rightHandLandmarksAnnotation.GetComponent <HandLandmarkListAnnotationController>().Draw(screenTransform, rightHandLandmarks, isFlipped);

        if (faceLandmarks.Landmark.Count == 0)
        {
            ClearIrisAnnotations();
        }
        else if (faceLandmarks.Landmark.Count > 468)
        {
            var leftIrisLandmarks = GetIrisLandmarks(faceLandmarks, Side.Left);
            leftIrisLandmarksAnnotation.GetComponent <IrisAnnotationController>().Draw(screenTransform, leftIrisLandmarks, isFlipped);

            var rightIrisLandmarks = GetIrisLandmarks(faceLandmarks, Side.Right);
            rightIrisLandmarksAnnotation.GetComponent <IrisAnnotationController>().Draw(screenTransform, rightIrisLandmarks, isFlipped);
        }
    }
Example #30
 void OnRoiFromLandmarksOutput(NormalizedRect roiFromLandmarks)
 {
     roiFromLandmarksAnnotationController.DrawLater(roiFromLandmarks);
 }