Code Example #1
File: EVR.cs Project: rjshaver/emby-theater-windows
 public void CopyFrom(MFVideoNormalizedRect from)
 {
     left   = from.left;
     top    = from.top;
     right  = from.right;
     bottom = from.bottom;
 }
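All of the snippets on this page pass MFVideoNormalizedRect to the EVR and mixer APIs. As a rough stand-in for reading them in isolation (an illustrative assumption, not the MediaFoundation.NET declaration, which presumably carries interop attributes omitted here), the type can be pictured as four floats giving fractions of the video frame, where (0, 0, 1, 1) selects the whole frame:

 // Hypothetical simplified stand-in for MFVideoNormalizedRect.
 // Marshaling details are omitted; coordinates are normalized to
 // the 0..1 range relative to the video frame.
 public class MFVideoNormalizedRect
 {
     public float left;
     public float top;
     public float right;
     public float bottom;

     public MFVideoNormalizedRect() { }

     public MFVideoNormalizedRect(float l, float t, float r, float b)
     {
         left = l; top = t; right = r; bottom = b;
     }
 }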
Code Example #2
File: EVR.cs Project: rjshaver/emby-theater-windows
        public override bool Equals(object obj)
        {
            if (obj is MFVideoNormalizedRect)
            {
                MFVideoNormalizedRect cmp = (MFVideoNormalizedRect)obj;

                return (right == cmp.right && bottom == cmp.bottom && left == cmp.left && top == cmp.top);
            }

            return false;
        }
Code Example #3
File: Presenter.cs Project: GoshaDE/SuperMFLib
        protected int m_TokenCounter; // Counter. Incremented whenever we create new samples.

        #endregion Fields

        #region Constructors

        /// <summary>
        /// Constructor
        /// </summary>
        public EVRCustomPresenter()
        {
            if (System.Threading.Thread.CurrentThread.GetApartmentState() != System.Threading.ApartmentState.MTA)
            {
                throw new Exception("Unsupported theading model");
            }

            m_iDiscarded = 0;
            m_pClock = null;
            m_pMixer = null;
            m_pMediaEventSink = null;
            m_h2 = null;
            m_pMediaType = null;

            m_bSampleNotify = false;
            m_bRepaint = false;
            m_bEndStreaming = false;
            m_bPrerolled = false;

            m_RenderState = RenderState.Shutdown;
            m_fRate = 1.0f;
            m_TokenCounter = 0;

            m_pD3DPresentEngine = new D3DPresentEngine();
            m_FrameStep = new FrameStep();            // Frame-stepping information.

            m_nrcSource = new MFVideoNormalizedRect(0.0f, 0.0f, 1.0f, 1.0f);
            m_scheduler = new Scheduler(D3DPresentEngine.PRESENTER_BUFFER_COUNT, m_pD3DPresentEngine);          // Manages scheduling of samples.
            m_SamplePool = new SamplePool(D3DPresentEngine.PRESENTER_BUFFER_COUNT);           // Pool of allocated samples.

            // Force load of mf.dll now, rather than when we try to start streaming
            DllCanUnloadNow();
        }
Code Example #4
File: Presenter.cs Project: GoshaDE/SuperMFLib
        public int GetVideoPosition(MFVideoNormalizedRect pnrcSource, MFRect prcDest)
        {
            // Make sure we *never* leave this entry point with an exception
            try
            {
                lock (this)
                {
                    if (pnrcSource == null || prcDest == null)
                    {
                        throw new COMException("EVRCustomPresenter::GetVideoPosition", E_Pointer);
                    }

                    pnrcSource.CopyFrom(m_nrcSource);
                    prcDest.CopyFrom(m_pD3DPresentEngine.GetDestinationRect());
                }

                return S_Ok;
            }
            catch (Exception e)
            {
                return Marshal.GetHRForException(e);
            }
        }
Code Example #5
File: Presenter.cs Project: GoshaDE/SuperMFLib
        protected void SetMixerSourceRect(IMFTransform pMixer, MFVideoNormalizedRect nrcSource)
        {
            if (pMixer == null)
            {
                throw new COMException("SetMixerSourceRect", E_Pointer);
            }

            int hr;
            IMFAttributes pAttributes = null;

            hr = pMixer.GetAttributes(out pAttributes);
            MFError.ThrowExceptionForHR(hr);

            Utils.MFSetBlob(pAttributes, MFAttributesClsid.VIDEO_ZOOM_RECT, nrcSource);

            SafeRelease(pAttributes); pAttributes = null;
        }
Code Example #6
File: Presenter.cs Project: GoshaDE/SuperMFLib
        public int SetVideoPosition(MFVideoNormalizedRect pnrcSource, MFRect prcDest)
        {
            // Make sure we *never* leave this entry point with an exception
            try
            {
                lock (this)
                {

                    // Validate parameters.

                    // One parameter can be NULL, but not both.
                    if (pnrcSource == null && prcDest == null)
                    {
                        throw new COMException("EVRCustomPresenter::SetVideoPosition", E_Pointer);
                    }

                    // Validate the rectangles.
                    if (pnrcSource != null)
                    {
                        // The source rectangle cannot be flipped.
                        if ((pnrcSource.left > pnrcSource.right) ||
                            (pnrcSource.top > pnrcSource.bottom))
                        {
                            throw new COMException("Bad source", E_InvalidArgument);
                        }

                        // The source rectangle has range (0..1)
                        if ((pnrcSource.left < 0) || (pnrcSource.right > 1) ||
                            (pnrcSource.top < 0) || (pnrcSource.bottom > 1))
                        {
                            throw new COMException("source has invalid values", E_InvalidArgument);
                        }
                    }

                    if (prcDest != null)
                    {
                        // The destination rectangle cannot be flipped.
                        if ((prcDest.left > prcDest.right) ||
                            (prcDest.top > prcDest.bottom))
                        {
                            throw new COMException("bad destination", E_InvalidArgument);
                        }
                    }

                    // Update the source rectangle. Source clipping is performed by the mixer.
                    if (pnrcSource != null)
                    {
                        m_nrcSource.CopyFrom(pnrcSource);

                        if (m_pMixer != null)
                        {
                            SetMixerSourceRect(m_pMixer, m_nrcSource);
                        }
                    }

                    // Update the destination rectangle.
                    if (prcDest != null)
                    {
                        MFRect rcOldDest = m_pD3DPresentEngine.GetDestinationRect();

                        // Check if the destination rectangle changed.
                        if (!rcOldDest.Equals(prcDest))
                        {
                            m_pD3DPresentEngine.SetDestinationRect(prcDest);

                            // Set a new media type on the mixer.
                            if (m_pMixer != null)
                            {
                                try
                                {
                                    RenegotiateMediaType();
                                }
                                catch (COMException e)
                                {
                                    if (e.ErrorCode == MFError.MF_E_TRANSFORM_TYPE_NOT_SET)
                                    {
                                        // This error means that the mixer is not ready for the media type.
                                        // Not a failure case -- the EVR will notify us when we need to set
                                        // the type on the mixer.
                                    }
                                    else
                                    {
                                        throw;
                                    }
                                }
                                // The media type changed. Request a repaint of the current frame.
                                m_bRepaint = true;
                                ProcessOutput(); // Ignore errors, the mixer might not have a video frame.
                            }
                        }
                    }
                }

                return S_Ok;
            }
            catch (Exception e)
            {
                return Marshal.GetHRForException(e);
            }
        }
Code Example #7
        void TestSetVideoPosition()
        {
            MFVideoNormalizedRect r1 = new MFVideoNormalizedRect();
            MFRect r2 = new MFRect();

            r1.bottom = 1.0f;
            r1.right = 0.9f;

            r2.bottom = 234;
            r2.right = 345;

            int hr = m_vdc.SetVideoPosition(r1, r2);
            MFError.ThrowExceptionForHR(hr);

            MFVideoNormalizedRect r3 = new MFVideoNormalizedRect();
            MFRect r4 = new MFRect();

            hr = m_vdc.GetVideoPosition(r3, r4);
            MFError.ThrowExceptionForHR(hr);
        }
Code Example #8
File: MainForm.cs Project: babgvant/EVRPlay
        internal float ChangeZoom(float adjustAmount)
        {
            if (evrDisplay != null)
            {
                zoomAmt += adjustAmount;

                if (zoomAmt < 1.0f)
                    zoomAmt = 1.0f;

                MFVideoNormalizedRect sRect = new MFVideoNormalizedRect();

                if (zoomAmt == 1.0)
                {
                    sRect.top = 0;
                    sRect.left = 0;
                    sRect.right = 1;
                    sRect.bottom = 1;
                }
                else
                {
                    float fMargin = (0.5f - (0.5f / zoomAmt));

                    sRect.top = fMargin;
                    sRect.left = fMargin;
                    sRect.right = (1.0f - fMargin);
                    sRect.bottom = (1.0f - fMargin);
                }

                this.evrDisplay.SetVideoPosition(sRect, null);
            }
            return zoomAmt;
        }
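A quick check of the margin math above: with zoomAmt = 2.0, fMargin = 0.5f - (0.5f / 2.0f) = 0.25f, so the source rectangle becomes (0.25, 0.25, 0.75, 0.75). The mixer then samples only the middle half of the frame in each dimension, and the EVR stretches it to the unchanged destination rectangle, giving a 2x zoom centered on the frame.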
Code Example #9
File: GraphBuilderBase.cs Project: dgis/CodeTV
        //protected virtual Rectangle GetInnerRectangle(Rectangle outerRectangle, double innerRatio)
        //{
        //    Rectangle innerRectangle = outerRectangle;
        //    double outerRatio = (double)outerRectangle.Width / (double)outerRectangle.Height;
        //    if (outerRatio >= innerRatio)
        //    {
        //        innerRectangle.Width = (int)((double)outerRectangle.Height * innerRatio);
        //        innerRectangle.X = (int)((outerRectangle.Width - innerRectangle.Width) / 2.0);
        //    }
        //    else
        //    {
        //        innerRectangle.Height = (int)((double)outerRectangle.Width / innerRatio);
        //        innerRectangle.Y = (int)((outerRectangle.Height - innerRectangle.Height) / 2.0);
        //    }
        //    return innerRectangle;
        //}
        protected Rectangle[] GetBlackBands()
        {
            Rectangle outerRectangle = this.hostingControl.ClientRectangle;
            DsRect innerDsRect = new DsRect();
            int hr;
            if (useEVR)
            {
                MFVideoNormalizedRect pnrcSource = new MFVideoNormalizedRect();
                MediaFoundation.Misc.MFRect prcDest = new MediaFoundation.Misc.MFRect();
                hr = evrVideoDisplayControl.GetVideoPosition(pnrcSource, prcDest);
                innerDsRect = DsRect.FromRectangle((Rectangle)prcDest);
            }
            else
            {
                IVMRWindowlessControl9 vmrWindowlessControl9 = this.videoRenderer as IVMRWindowlessControl9;
                hr = vmrWindowlessControl9.GetVideoPosition(null, innerDsRect);
            }
            Rectangle innerRectangle = innerDsRect.ToRectangle();

            //Trace.WriteLineIf(trace.TraceVerbose, string.Format(("\tvideoRenderer.GetVideoPosition({0})"), innerRectangle.ToString()));
            //Trace.WriteLineIf(trace.TraceVerbose, string.Format(("\thostingControl.ClientRectangle({0})"), outerRectangle.ToString()));

            List<Rectangle> alRectangles = new List<Rectangle>();

            if (innerRectangle.Top > outerRectangle.Top)
                alRectangles.Add(new Rectangle(outerRectangle.Left, outerRectangle.Top, outerRectangle.Width - 1, innerRectangle.Top - 1));

            if (innerRectangle.Bottom < outerRectangle.Bottom)
                alRectangles.Add(new Rectangle(outerRectangle.Left, innerRectangle.Bottom, outerRectangle.Width - 1, outerRectangle.Height - (innerRectangle.Bottom + 1)));

            if (innerRectangle.Left > outerRectangle.Left)
            {
                Rectangle rectangleLeft = new Rectangle(outerRectangle.Left, innerRectangle.Top, innerRectangle.Left - 1, innerRectangle.Height - 1);
                rectangleLeft.Intersect(outerRectangle);
                alRectangles.Add(rectangleLeft);
            }

            if (innerRectangle.Right < outerRectangle.Right)
            {
                Rectangle rectangleLeft = new Rectangle(innerRectangle.Right, innerRectangle.Top, outerRectangle.Width - (innerRectangle.Right + 1), innerRectangle.Height - 1);
                rectangleLeft.Intersect(outerRectangle);
                alRectangles.Add(rectangleLeft);
            }
            return alRectangles.ToArray();
        }
Code Example #10
File: SLMedia.cs Project: babaq/StiLib
        /// <summary>
        /// Resize video
        /// </summary>
        /// <param name="width"></param>
        /// <param name="height"></param>
        /// <returns></returns>
        public int ResizeVideo(short width, short height)
        {
            TRACE(string.Format("ResizeVideo: {0}x{1}", width, height));

            int hr = S_Ok;
            if (m_pVideoDisplay != null)
            {
                try
                {
                    MFRect rcDest = new MFRect();
                    MFVideoNormalizedRect nRect = new MFVideoNormalizedRect();

                    nRect.left = 0;
                    nRect.right = 1;
                    nRect.top = 0;
                    nRect.bottom = 1;
                    rcDest.left = 0;
                    rcDest.top = 0;
                    rcDest.right = width;
                    rcDest.bottom = height;

                    m_pVideoDisplay.SetVideoPosition(nRect, rcDest);
                }
                catch (Exception e)
                {
                    hr = Marshal.GetHRForException(e);
                }
            }

            return hr;
        }
Code Example #11
File: DShowPlayer.cs Project: adambyram/pimaker
        public void Track(Point pt)
        {
            if (m_pMixer == null || m_pMapper == null)
            {
                throw new COMException("null mixer or mapper", MFError.MF_E_INVALIDREQUEST);
            }

            Rectangle r = m_hwndVideo.ClientRectangle;
            MFRect rc = new MFRect(r.Left, r.Top, r.Right, r.Bottom);

            // x, y: Mouse coordinates, normalized relative to the composition rectangle.
            float x = (float)pt.X / rc.right;
            float y = (float)pt.Y / rc.bottom;

            // Map the mouse coordinates to the reference stream.
            m_pMapper.MapOutputCoordinateToInputStream(
                x, y,       // Output coordinates
                0,          // Output stream (the mixer only has one)
                0,          // Input stream (0 = ref stream)
                out x, out y      // Receives the normalized input coordinates.
                );

            // Offset by the original hit point.
            x -= m_ptHitTrack.X;
            y -= m_ptHitTrack.Y;

            float max_offset = 1.0f - m_fScale; // Maximum left and top positions for the substream.

            MFVideoNormalizedRect nrcDest = new MFVideoNormalizedRect();

            if (x < 0)
            {
                nrcDest.left = 0;
                nrcDest.right = m_fScale;
            }
            else if (x > max_offset)
            {
                nrcDest.right = 1;
                nrcDest.left = max_offset;
            }
            else
            {
                nrcDest.left = x;
                nrcDest.right = x + m_fScale;
            }

            if (y < 0)
            {
                nrcDest.top = 0;
                nrcDest.bottom = m_fScale;
            }
            else if (y > max_offset)
            {
                nrcDest.bottom = 1;
                nrcDest.top = max_offset;
            }
            else
            {
                nrcDest.top = y;
                nrcDest.bottom = y + m_fScale;
            }

            // Set the new position.
            m_pMixer.SetStreamOutputRect(1, nrcDest);
        }
Code Example #12
File: DShowPlayer.cs Project: adambyram/pimaker
        public void SetScale(float fScale)
        {
            if (m_ImageHandlers == null || m_ImageHandlers.Length < 2)
            {
                return;
            }
            if (fScale < 0 || fScale > 1.0)
            {
                throw new COMException("Invalid scale", E_InvalidArgument);
            }

            if (fScale == m_fScale)
            {
                return; // no-op
            }

            if (m_pMixer == null)
            {
                throw new COMException("No mixer", E_InvalidArgument);
            }

            // Get the current position of the substream rectangle.
            MFVideoNormalizedRect rect = new MFVideoNormalizedRect();

            m_pMixer.GetStreamOutputRect(1, rect);

            // When this method is called, the substream might be positioned anywhere
            // within the composition rectangle. To resize it, first we scale the
            // right/bottom edges up to the maximum, and then scale the left/top edges.
            rect.right = Math.Min(rect.left + fScale, 1.0f);
            rect.bottom = Math.Min(rect.top + fScale, 1.0f);

            rect.left -= Math.Max(fScale - (rect.right - rect.left), 0.0f);
            rect.top -= Math.Max(fScale - (rect.bottom - rect.top), 0.0f);

            // Set the new position.
            m_pMixer.SetStreamOutputRect(1, rect);

            m_fScale = fScale;
        }
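Worked through on sample values, the clamping above keeps the substream inside the composition rectangle: starting from an output rect of (0.6, 0.6, 0.9, 0.9) with fScale = 0.6, right and bottom clamp to 1.0, and left/top are then pulled back by max(0.6 - 0.4, 0.0) = 0.2 to 0.4, so the new rect is (0.4, 0.4, 1.0, 1.0) and the substream ends up exactly 0.6 wide and tall.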
Code Example #13
File: Player.cs Project: babgvant/EVRPlay
        private void MoveVideoWindow()
        {
            //int hr = 0;

            // Track the movement of the container window and resize as needed
            if (this.evrDisplay != null)
            {
                MFVideoNormalizedRect sRect = new MFVideoNormalizedRect();
                sRect.top = 0;
                sRect.left = 0;
                sRect.right = 1;
                sRect.bottom = 1;
                MediaFoundation.Misc.MFRect dRect = new MediaFoundation.Misc.MFRect();
                //dRect.top = 0;
                //dRect.left = 0;
                //dRect.right = ClientRectangle.Width;//this.Width;
                //dRect.bottom = ClientRectangle.Height;//this.Height;
                dRect.top = 0 - (ps.OverscanHeight / 2);
                dRect.left = 0 - (ps.OverscanWidth / 2);
                dRect.right = container.ClientRectangle.Width + (ps.OverscanWidth / 2);//this.Width;
                dRect.bottom = container.ClientRectangle.Height + (ps.OverscanHeight / 2);//this.Height;
                this.evrDisplay.SetVideoPosition(sRect, dRect);
            }
        }
Code Example #14
File: MainForm.cs Project: babgvant/EVRPlay
        private void menuItem14_Click(object sender, EventArgs e)
        {
            MFVideoAlphaBitmap alphaBmp = new MFVideoAlphaBitmap();

            using (Bitmap alphaBitmap = new Bitmap("epsonproj.png"))
            {

                //alphaBitmap is a 32bit semitransparent Bitmap
                Graphics g = Graphics.FromImage(alphaBitmap);

                // get pointer to needed objects
                IntPtr hdc = g.GetHdc();
                IntPtr memDC = CreateCompatibleDC(hdc);
                IntPtr hBitmap = alphaBitmap.GetHbitmap();
                IntPtr hOld = SelectObject(memDC, hBitmap);

                alphaBmp.GetBitmapFromDC = true;
                alphaBmp.stru = memDC;
                alphaBmp.paras = new MFVideoAlphaBitmapParams();
                alphaBmp.paras.dwFlags = MFVideoAlphaBitmapFlags.Alpha | MFVideoAlphaBitmapFlags.DestRect;

                // calculate destination rectangle
                MFVideoNormalizedRect mfNRect = new MFVideoNormalizedRect();
                //NormalizedRect nRect = GetDestRectangle(width, height, subtitleLines);

                mfNRect.top = 0.5f;// nRect.top;
                mfNRect.left = 0.5f;// nRect.left;
                mfNRect.right = 1.0f;//nRect.right;
                mfNRect.bottom = 1.0f;// nRect.bottom;

                // used when viewing half side by side anaglyph video that is stretched to full width
                //if (FrameMode == Mars.FrameMode.HalfSideBySide)
                //{
                //    mfNRect.left /= 2;
                //    mfNRect.right /= 2;
                //}

                alphaBmp.paras.nrcDest = mfNRect;

                // calculate source rectangle (full subtitle bitmap)
                MFRect rcSrc = new MFRect();
                rcSrc.bottom = alphaBitmap.Height;
                rcSrc.right = alphaBitmap.Width;
                rcSrc.top = 0;
                rcSrc.left = 0;

                alphaBmp.paras.rcSrc = rcSrc;

                // apply 1-bit transparency 
                //System.Drawing.Color colorKey = System.Drawing.Color.White;
                //alphaBmp.paras.clrSrcKey = ColorTranslator.ToWin32(colorKey);

                // 50% visible
                alphaBmp.paras.fAlpha = 0.5F;

                // set the bitmap to the evr mixer
                mixBmp.SetAlphaBitmap(alphaBmp);

                // cleanup
                SelectObject(memDC, hOld);
                DeleteDC(memDC);
                g.ReleaseHdc();
            }
        }
Code Example #15
File: GraphBuilderBase.cs Project: dgis/CodeTV
        // Idea from Gabest (http://www.gabest.org), modified by me
        public virtual void VideoResizer(VideoSizeMode videoZoomMode, bool keepAspectRatio, PointF offset, double zoom, double aspectRatioFactor)
        {
            Trace.WriteLineIf(trace.TraceInfo, "VideoResizer(...)");
            int hr = 0;

            Rectangle windowRect = this.hostingControl.ClientRectangle;
            currentVideoTargetRectangle = windowRect;
            currentVideoSourceSize = new Size();

            FilterState filterState = GetGraphState();
            if (filterState == FilterState.Paused || filterState == FilterState.Running)
            {
                if (videoZoomMode != VideoSizeMode.StretchToWindow)
                {
                    int arX, arY;
                    int arX2 = 0, arY2 = 0;

                    if (useEVR)
                    {
                        Size videoSize = new Size(), arVideoSize = new Size();
                        hr = evrVideoDisplayControl.GetNativeVideoSize(out videoSize, out arVideoSize);
                        //IMFVideoDisplayControlEx evrVideoDisplayControlPlus = evrVideoDisplayControl as IMFVideoDisplayControlEx;
                        //hr = evrVideoDisplayControlPlus.GetNativeVideoSize(out videoSize, out arVideoSize);
                        //hr = evrVideoDisplayControlPlus.GetIdealVideoSize(videoSize, arVideoSize);
                        arX = videoSize.Width;
                        arY = videoSize.Height;
                        arX2 = arVideoSize.Width;
                        arY2 = arVideoSize.Height;
                        Trace.WriteLineIf(trace.TraceVerbose, string.Format(("\tvideoRenderer.GetNativeVideoSize({0}, {1})"), videoSize.ToString(), arVideoSize.ToString()));
                    }
                    else
                        hr = (this.videoRenderer as IVMRWindowlessControl9).GetNativeVideoSize(out arX, out arY, out arX2, out arY2);
                    if (hr >= 0 && arY > 0)
                    {
                        //DsError.ThrowExceptionForHR(hr);
                        //Trace.WriteLineIf(trace.TraceVerbose, string.Format("\tGetNativeVideoSize(width: {0}, height: {1}, arX {2}, arY: {3}", arX, arY, arX2, arY2));

                        if (arX2 > 0 && arY2 > 0)
                        {
                            arX = arX2;
                            arY = arY2;
                        }

                        currentVideoSourceSize.Width = arX;
                        currentVideoSourceSize.Height = arY;

                        Size windowSize = windowRect.Size;

                        double newAspectRation = aspectRatioFactor * (double)arX / (double)arY * (this.useVideo169Mode ? 3.0 / 4.0 : 1.0);
                        int height = windowSize.Height;
                        int width = (int)((double)height * newAspectRation);

                        if (videoZoomMode == VideoSizeMode.FromInside || videoZoomMode == VideoSizeMode.FromOutside)
                        {
                            if (videoZoomMode == VideoSizeMode.FromInside && width > windowSize.Width
                            || videoZoomMode == VideoSizeMode.FromOutside && width < windowSize.Width)
                            {
                                width = windowSize.Width;
                                height = (int)((double)width / newAspectRation);
                            }
                        }

                        Size size = new Size((int)(zoom * width), (int)(zoom * height));

                        Point pos = new Point(
                            (int)(offset.X * (windowRect.Width * 3 - size.Width) - windowRect.Width),
                            (int)(offset.Y * (windowRect.Height * 3 - size.Height) - windowRect.Height));

                        //Point pos = new Point(
                        //    (int)(offset.X * (windowRect.Width - size.Width)),
                        //    (int)(offset.Y * (windowRect.Height - size.Height)));

                        currentVideoTargetRectangle = new Rectangle(pos, size);
                    }
                }
                if (useEVR)
                {
                    //hr = evrVideoDisplayControl.SetVideoWindow(this.hostingControl.Handle);
                    MFVideoNormalizedRect pnrcSource = new MFVideoNormalizedRect(0.0f, 0.0f, 1.0f, 1.0f);
                    hr = this.evrVideoDisplayControl.SetVideoPosition(pnrcSource, (MediaFoundation.Misc.MFRect)currentVideoTargetRectangle);
                    this.hostingControl.ModifyBlackBands(GetBlackBands(), Settings.VideoBackgroundColor);
                }
                else
                    hr = (this.videoRenderer as IVMRWindowlessControl9).SetVideoPosition(null, DsRect.FromRectangle(currentVideoTargetRectangle));
                //Trace.WriteLineIf(trace.TraceVerbose, string.Format(("\tPos {0:F2} {1:F2}, Zoom {2:F2}, ARF {4:F2}, AR {4:F2}"), offset.X, offset.Y, zoom, aspectRatioFactor, (float)videoTargetRect.Width / videoTargetRect.Height));
                Trace.WriteLineIf(trace.TraceVerbose, string.Format(("\tvideoRenderer.SetVideoPosition({0})"), currentVideoTargetRectangle.ToString()));
            }
        }
Code Example #16
        private void SetAspectRatio(Size? ratio = null, bool setVideoWindow = true)
        {
            int screenWidth;
            int screenHeight;

            if (m_graph == null)
                return;

            if (_isInExclusiveMode)
            {
                var size = System.Windows.Forms.Screen.FromControl(_hostForm).Bounds;

                screenWidth = size.Width;
                screenHeight = size.Height;
            }
            else
            {
                var hiddenWindowContentSize = _hostForm.Bounds;

                screenWidth = hiddenWindowContentSize.Width;
                screenHeight = hiddenWindowContentSize.Height;
            }

            // Set the display position to the entire window.
            if (_mPDisplay != null)
            {
                MFRect dRect = new MFRect(0, 0, screenWidth, screenHeight);
                MFSize vSize = new MFSize(), vAR = new MFSize();
                double m_ZoomX = 1, m_ZoomY = 1, m_PosX = 0.5, m_PosY = 0.5;
                MFVideoNormalizedRect sRect = new MFVideoNormalizedRect();
                sRect.top = 0;
                sRect.left = 0;
                sRect.right = 1;
                sRect.bottom = 1;

                int hr = _mPDisplay.GetNativeVideoSize(vSize, vAR);
                if (hr > -1)
                {
                    double dVideoAR = (double)vSize.Width / vSize.Height;

                    double dWRWidth = screenWidth;
                    double dWRHeight = screenHeight;

                    double dVRWidth = dWRHeight * dVideoAR;
                    double dVRHeight;

                    _logger.Debug("Scale: {0} Video Width: {1} Video Height: {2} X-AR: {3} Y-AR: {4}", _iVideoScaling, vSize.Width, vSize.Height, vAR.cx, vAR.cy);

                    switch (_iVideoScaling)
                    {
                        case VideoScalingScheme.HALF:
                            dVRWidth = vSize.Width * 0.5;
                            dVRHeight = vSize.Height * 0.5;
                            break;
                        case VideoScalingScheme.NORMAL:
                            dVRWidth = vSize.Width;
                            dVRHeight = vSize.Height;
                            break;
                        case VideoScalingScheme.DOUBLE:
                            dVRWidth = vSize.Width * 2.0;
                            dVRHeight = vSize.Height * 2.0;
                            break;
                        case VideoScalingScheme.STRETCH:
                            dVRWidth = dWRWidth;
                            dVRHeight = dWRHeight;
                            break;
                        default:
                        //ASSERT(FALSE);
                        // Fallback to "Touch Window From Inside" if settings were corrupted.
                        case VideoScalingScheme.FROMINSIDE:
                        case VideoScalingScheme.FROMOUTSIDE:
                            if ((screenWidth < dVRWidth) != (_iVideoScaling == VideoScalingScheme.FROMOUTSIDE))
                            {
                                dVRWidth = dWRWidth;
                                dVRHeight = dVRWidth / dVideoAR;
                            }
                            else
                            {
                                dVRHeight = dWRHeight;
                            }
                            break;
                        case VideoScalingScheme.ZOOM1:
                        case VideoScalingScheme.ZOOM2:
                            {
                                double minw = dWRWidth < dVRWidth ? dWRWidth : dVRWidth;
                                double maxw = dWRWidth > dVRWidth ? dWRWidth : dVRWidth;

                                double scale = _iVideoScaling == VideoScalingScheme.ZOOM1 ? 1.0 / 3.0 : 2.0 / 3.0;
                                dVRWidth = minw + (maxw - minw) * scale;
                                dVRHeight = dVRWidth / dVideoAR;
                                break;
                            }
                    }

                    // Scale video frame
                    double dScaledVRWidth = m_ZoomX * dVRWidth;
                    double dScaledVRHeight = m_ZoomY * dVRHeight;

                    // Position video frame
                    // left and top parts are allowed to be negative
                    dRect.left = (int)Math.Round(m_PosX * (dWRWidth * 3.0 - dScaledVRWidth) - dWRWidth);
                    dRect.top = (int)Math.Round(m_PosY * (dWRHeight * 3.0 - dScaledVRHeight) - dWRHeight);
                    // right and bottom parts are always at picture center or beyond, so never negative
                    dRect.right = (int)Math.Round(dRect.left + dScaledVRWidth);
                    dRect.bottom = (int)Math.Round(dRect.top + dScaledVRHeight);

                    //apply overscan
                    //dRect.top = dRect.top - (ps.OverscanHeight / 2);
                    //dRect.left = dRect.left - (ps.OverscanWidth / 2);
                    //dRect.right = dRect.right + (ps.OverscanWidth / 2);//this.Width;
                    //dRect.bottom = dRect.bottom + (ps.OverscanHeight / 2);//this.Height;
                }

                _mPDisplay.SetVideoPosition(sRect, dRect);
            }

            // Get Aspect Ratio
            int aspectX;
            int aspectY;

            if (ratio.HasValue)
            {
                aspectX = ratio.Value.Width;
                aspectY = ratio.Value.Height;
            }
            else
            {
                var basicVideo2 = (IBasicVideo2)m_graph;
                basicVideo2.GetPreferredAspectRatio(out aspectX, out aspectY);

                var sourceHeight = 0;
                var sourceWidth = 0;

                _basicVideo.GetVideoSize(out sourceWidth, out sourceHeight);

                if (aspectX == 0 || aspectY == 0 || sourceWidth > 0 || sourceHeight > 0)
                {
                    aspectX = sourceWidth;
                    aspectY = sourceHeight;
                }
            }

            // Adjust Video Size
            var iAdjustedHeight = 0;

            if (aspectX > 0 && aspectY > 0)
            {
                double adjustedHeight = aspectY * screenWidth;
                adjustedHeight /= aspectX;

                iAdjustedHeight = Convert.ToInt32(Math.Round(adjustedHeight));
            }

            if (screenHeight > iAdjustedHeight && iAdjustedHeight > 0)
            {
                double totalMargin = (screenHeight - iAdjustedHeight);
                var topMargin = Convert.ToInt32(Math.Round(totalMargin / 2));

                _basicVideo.SetDestinationPosition(0, topMargin, screenWidth, iAdjustedHeight);
            }
            else if (iAdjustedHeight > 0)
            {
                double adjustedWidth = aspectX * screenHeight;
                adjustedWidth /= aspectY;

                var iAdjustedWidth = Convert.ToInt32(Math.Round(adjustedWidth));

                double totalMargin = (screenWidth - iAdjustedWidth);
                var leftMargin = Convert.ToInt32(Math.Round(totalMargin / 2));

                _basicVideo.SetDestinationPosition(leftMargin, 0, iAdjustedWidth, screenHeight);
            }

            if (setVideoWindow)
            {
                _videoWindow.SetWindowPosition(0, 0, screenWidth, screenHeight);
            }
        }
Code Example #17
File: MainForm.cs Project: babgvant/EVRPlay
        private void MoveVideoWindow()
        {
            int hr = 0;
            try
            {
                // Track the movement of the container window and resize as needed
                if (this.evrDisplay != null)
                {
                    MFVideoNormalizedRect sRect = new MFVideoNormalizedRect();
                    sRect.top = 0;
                    sRect.left = 0;
                    sRect.right = 1;
                    sRect.bottom = 1;
                    MediaFoundation.Misc.MFRect dRect = new MediaFoundation.Misc.MFRect();
                    MFSize vSize = new MFSize(), vAR = new MFSize();
                    double m_ZoomX = 1, m_ZoomY = 1, m_PosX = 0.5, m_PosY = 0.5;
                    //dRect.top = 0;
                    //dRect.left = 0;
                    //dRect.right = ClientRectangle.Width;//this.Width;
                    //dRect.bottom = ClientRectangle.Height;//this.Height;
                    //dRect.top = 0 - (ps.OverscanHeight / 2);
                    //dRect.left = 0 - (ps.OverscanWidth / 2);
                    //dRect.right = ClientRectangle.Width + (ps.OverscanWidth / 2);//this.Width;
                    //dRect.bottom = ClientRectangle.Height + (ps.OverscanHeight / 2);//this.Height;

                    hr = evrDisplay.GetNativeVideoSize(vSize, vAR);
                    DsError.ThrowExceptionForHR(hr);

                    double dVideoAR = (double)vSize.Width / vSize.Height;

                    double dWRWidth = ClientRectangle.Width;
                    double dWRHeight = ClientRectangle.Height;

                    double dVRWidth = dWRHeight * dVideoAR;
                    double dVRHeight;

                    switch (iVideoScaling)
                    {
                        case dvstype.HALF:
                            dVRWidth = vSize.Width * 0.5;
                            dVRHeight = vSize.Height * 0.5;
                            break;
                        case dvstype.NORMAL:
                            dVRWidth = vSize.Width;
                            dVRHeight = vSize.Height;
                            break;
                        case dvstype.DOUBLE:
                            dVRWidth = vSize.Width * 2.0;
                            dVRHeight = vSize.Height * 2.0;
                            break;
                        case dvstype.STRETCH:
                            dVRWidth = dWRWidth;
                            dVRHeight = dWRHeight;
                            break;
                        default:
                        //ASSERT(FALSE);
                        // Fallback to "Touch Window From Inside" if settings were corrupted.
                        case dvstype.FROMINSIDE:
                        case dvstype.FROMOUTSIDE:
                            if ((ClientRectangle.Width < dVRWidth) != (iVideoScaling == dvstype.FROMOUTSIDE))
                            {
                                dVRWidth = dWRWidth;
                                dVRHeight = dVRWidth / dVideoAR;
                            }
                            else
                            {
                                dVRHeight = dWRHeight;
                            }
                            break;
                        case dvstype.ZOOM1:
                        case dvstype.ZOOM2:
                            {
                                double minw = dWRWidth < dVRWidth ? dWRWidth : dVRWidth;
                                double maxw = dWRWidth > dVRWidth ? dWRWidth : dVRWidth;

                                double scale = iVideoScaling == dvstype.ZOOM1 ? 1.0 / 3.0 : 2.0 / 3.0;
                                dVRWidth = minw + (maxw - minw) * scale;
                                dVRHeight = dVRWidth / dVideoAR;
                                break;
                            }
                    }

                    // Scale video frame
                    double dScaledVRWidth = m_ZoomX * dVRWidth;
                    double dScaledVRHeight = m_ZoomY * dVRHeight;

                    // Position video frame
                    // left and top parts are allowed to be negative
                    dRect.left = (int)Math.Round(m_PosX * (dWRWidth * 3.0 - dScaledVRWidth) - dWRWidth);
                    dRect.top = (int)Math.Round(m_PosY * (dWRHeight * 3.0 - dScaledVRHeight) - dWRHeight);
                    // right and bottom parts are always at picture center or beyond, so never negative
                    dRect.right = (int)Math.Round(dRect.left + dScaledVRWidth);
                    dRect.bottom = (int)Math.Round(dRect.top + dScaledVRHeight);

                    //apply overscan
                    dRect.top = dRect.top - (ps.OverscanHeight / 2);
                    dRect.left = dRect.left - (ps.OverscanWidth / 2);
                    dRect.right = dRect.right + (ps.OverscanWidth / 2);//this.Width;
                    dRect.bottom = dRect.bottom + (ps.OverscanHeight / 2);//this.Height;

                    this.evrDisplay.SetVideoPosition(sRect, dRect);
                    Debug.Print("t: {0} l: {1} r:{2} b:{3}", dRect.top, dRect.left, dRect.right, dRect.bottom);
                }
            }
            catch (Exception ex)
            {
                FileLogger.Log("MoveVideoWindow Error: {0}", ex.Message);
            }
        }
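The positioning formula used by the last two examples centers the scaled frame when m_PosX and m_PosY are 0.5: left = 0.5 * (3 * W - w) - W = (W - w) / 2, with W the window (client) width and w the scaled video width, and likewise for top. Sliding the position factors toward 0 or 1 pans the frame up to a full window width or height past either edge, which is why the code allows left and top to go negative.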