Example #1
    public int SetPosition(long tStart)
    {
        IMediaSeeking pSeek = m_pGraph as IMediaSeeking;
        int           hr    = E_NOINTERFACE;

        if (pSeek != null)
        {
            // input time is relative to clip start -- add on offset
            // from start of media
            tStart += m_tStart;
            if (m_tStop == 0)
            {
                hr = pSeek.SetPositions(
                    tStart,
                    AMSeekingSeekingFlags.AbsolutePositioning,
                    null,
                    AMSeekingSeekingFlags.NoPositioning);
            }
            else
            {
                hr = pSeek.SetPositions(
                    tStart,
                    AMSeekingSeekingFlags.AbsolutePositioning,
                    m_tStop,
                    AMSeekingSeekingFlags.AbsolutePositioning);
            }
        }
        return(hr);
    }
Example #2
 public void ResetMedia()
 {
     if (mediaSeeking != null)
     {
         mediaSeeking.SetPositions(DsLong.FromInt64(0), AMSeekingSeekingFlags.AbsolutePositioning, null, AMSeekingSeekingFlags.NoPositioning);
     }
 }
Example #3
        /// <summary>
        /// Callback that handles events sent from the IFilterGraph
        /// </summary>
        private void HandleGraphEvent()
        {
            int       hr = 0;
            EventCode evCode;
            IntPtr    evParam1, evParam2;

            // Make sure that we don't access the media event interface
            // after it has already been released.
            if (_mediaEventEx == null)
            {
                return;
            }

            // Process all queued events
            while (_mediaEventEx.GetEvent(out evCode, out evParam1, out evParam2, 0) == 0)
            {
                // Free memory associated with callback, since we're not using it
                hr = _mediaEventEx.FreeEventParams(evCode, evParam1, evParam2);

                // If this is the end of the clip, reset to beginning
                if (evCode == EventCode.Complete)
                {
                    Stop();
                    // Rewind to first frame of movie
                    hr = _mediaSeeking.SetPositions((long)_timeSliderControl.Min * 10000, AMSeekingSeekingFlags.AbsolutePositioning,
                                                    null, AMSeekingSeekingFlags.NoPositioning);
                    _timeSliderControl.Pos = _timeSliderControl.Min;
                }
            }
        }
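HandleGraphEvent above is normally driven by a window message registered with IMediaEventEx.SetNotifyWindow. A minimal sketch of that wiring, assuming a Windows Forms host; WM_GRAPHNOTIFY and RegisterGraphNotifications are illustrative names, not part of the example above:

        // Sketch only: register graph event notifications against a window handle so
        // WndProc can forward them to HandleGraphEvent. WM_GRAPHNOTIFY is an arbitrary
        // application-defined message id (WM_APP + 1).
        private const int WM_GRAPHNOTIFY = 0x8000 + 1;

        private void RegisterGraphNotifications(IntPtr windowHandle)
        {
            if (_mediaEventEx != null)
            {
                int hr = _mediaEventEx.SetNotifyWindow(windowHandle, WM_GRAPHNOTIFY, IntPtr.Zero);
                DsError.ThrowExceptionForHR(hr);
            }
        }

        protected override void WndProc(ref System.Windows.Forms.Message m)
        {
            if (m.Msg == WM_GRAPHNOTIFY)
            {
                HandleGraphEvent();
            }
            base.WndProc(ref m);
        }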
Example #4
        public override void SeekAbsolute(double dTimeInSecs)
        {
            Log.Info("SeekAbsolute:seekabs:{0}", dTimeInSecs);


            if (_state != PlayState.Init)
            {
                if (_mediaCtrl != null && _mediaSeeking != null)
                {
                    if (dTimeInSecs < 0.0d)
                    {
                        dTimeInSecs = 0.0d;
                    }
                    if (dTimeInSecs > Duration)
                    {
                        dTimeInSecs = Duration;
                    }
                    dTimeInSecs = Math.Floor(dTimeInSecs);
                    Log.Info("RTSPPlayer: seekabs: {0} duration:{1} current pos:{2}", dTimeInSecs, Duration, CurrentPosition);
                    dTimeInSecs *= 10000000d;
                    long   pStop = 0;
                    long   lContentStart, lContentEnd;
                    double fContentStart, fContentEnd;
                    Log.Info("get available");
                    _mediaSeeking.GetAvailable(out lContentStart, out lContentEnd);
                    Log.Info("get available done");
                    fContentStart = lContentStart;
                    fContentEnd   = lContentEnd;

                    dTimeInSecs += fContentStart;
                    long lTime = (long)dTimeInSecs;
                    Log.Info("set positions");
                    if (VMR9Util.g_vmr9 != null)
                    {
                        VMR9Util.g_vmr9.FrameCounter = 123;
                    }
                    int hr = _mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                                        new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);

                    if (VMR9Util.g_vmr9 != null)
                    {
                        VMR9Util.g_vmr9.FrameCounter = 123;
                    }
                    Log.Info("set positions done");
                    if (hr != 0)
                    {
                        Log.Error("seek failed->seek to 0 0x:{0:X}", hr);
                    }
                }
                UpdateCurrentPosition();
                if (dvbSubRenderer != null)
                {
                    dvbSubRenderer.OnSeek(CurrentPosition);
                }
                Log.Info("RTSPPlayer: current pos:{0}", CurrentPosition);
            }
        }
Example #5
 public static void setNewPos(IMediaSeeking mediaSeeking, long newPos, double fps)
 {
     if (mediaSeeking == null) return;
     if (!isInFrames(mediaSeeking))
         newPos = frame2refTime(newPos,fps);
     mediaSeeking.SetPositions(newPos, AMSeekingSeekingFlags.AbsolutePositioning, null, AMSeekingSeekingFlags.NoPositioning);
 }
Example #6
        /// <summary>
        /// Set the current position within a video file to the specified position.  This function is ignored when using a web-cam.
        /// </summary>
        /// <param name="lPosition">Specifies the new position to set.</param>
        public void SetPosition(long lPosition)
        {
            if (m_mediaSeek == null)
            {
                return;
            }

            long lDuration;
            int  hr = m_mediaSeek.GetDuration(out lDuration);

            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            if (lPosition < 0 || lPosition > lDuration)
            {
                throw new Exception("The position specified is outside of the video duration range [0," + lDuration.ToString() + "].  Please specify a valid position.");
            }

            DsOptInt64 pos  = new DsOptInt64(lPosition);
            DsOptInt64 stop = new DsOptInt64(lDuration);

            hr = m_mediaSeek.SetPositions(pos, SeekingFlags.AbsolutePositioning, stop, SeekingFlags.AbsolutePositioning);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }
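The lPosition parameter above is a raw 100-nanosecond media-time value. Since .NET TimeSpan ticks are also 100 ns, a caller can pass them through directly, assuming the graph is using the default TimeFormat.MediaTime; "player" below is an assumed instance of the class in this example:

        // Usage sketch (assumptions: "player" is an instance of the class above and the
        // graph is in the default MediaTime format). TimeSpan.Ticks are 100-ns units,
        // the same resolution DirectShow uses for media time, so no scaling is needed.
        public void SeekToTimeSpan(TimeSpan target)
        {
            player.SetPosition(target.Ticks);
        }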
Example #7
        /// <summary>
        /// Seeks to the specified position in the video.
        /// </summary>
        /// <param name="pos">The position</param>
        void SeekToPosition(long pos)
        {
            // Set the position
            int hr = _mediaSeeking.SetPositions(pos, AMSeekingSeekingFlags.AbsolutePositioning, null, AMSeekingSeekingFlags.NoPositioning);

            DsError.ThrowExceptionForHR(hr);
        }
Example #8
        public void Seek(int timeInMs)
        {
            int hr = seeker.SetPositions(new DsLong(timeInMs * 10000L), AMSeekingSeekingFlags.AbsolutePositioning, null, AMSeekingSeekingFlags.NoPositioning);

            DsError.ThrowExceptionForHR(hr);
            hr = control.GetState(timeOutMs, out state);             // state is Running
            DsError.ThrowExceptionForHR(hr);
        }
        //Play a specific shot
        public void PlayShot(int start, int stop)
        {
            long          l_start         = (long)start;
            long          l_stop          = (long)stop;
            IMediaSeeking i_media_seeking = m_FilterGraph as IMediaSeeking;

            i_media_seeking.SetTimeFormat(TimeFormat.Frame);
            i_media_seeking.SetPositions(l_start, DirectShowLib.AMSeekingSeekingFlags.AbsolutePositioning, l_stop, DirectShowLib.AMSeekingSeekingFlags.AbsolutePositioning);
        }
        //Reset: start position is 0, end position is the last frame, needed after playshot
        public void Reset()
        {
            IMediaSeeking i_media_seeking = m_FilterGraph as IMediaSeeking;

            i_media_seeking.SetTimeFormat(TimeFormat.Frame);
            long endFrame = 0;
            int  test     = i_media_seeking.GetDuration(out endFrame);

            i_media_seeking.SetPositions(0, DirectShowLib.AMSeekingSeekingFlags.AbsolutePositioning, endFrame, DirectShowLib.AMSeekingSeekingFlags.AbsolutePositioning);
        }
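PlayShot and Reset above switch the graph to frame-based seeking without checking whether the source actually supports it, and not every splitter does. A hedged helper that verifies support first (the method name is an assumption):

        // Sketch: confirm frame-based seeking is available before switching formats.
        // IMediaSeeking.IsFormatSupported returns S_OK (0) only when the format is
        // supported; otherwise stay with the default media-time format.
        private static bool TrySetFrameTimeFormat(IMediaSeeking seeking)
        {
            if (seeking == null)
            {
                return false;
            }
            if (seeking.IsFormatSupported(TimeFormat.Frame) != 0)
            {
                return false;
            }
            return seeking.SetTimeFormat(TimeFormat.Frame) == 0;
        }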
Example #11
        /// <summary> capture the next image </summary>
        public void Start()
        {
            int hr =
                m_MediaSeeking.SetPositions(10, AMSeekingSeekingFlags.SeekToKeyFrame, 1,
                                            AMSeekingSeekingFlags.IncrementalPositioning);

            DsError.ThrowExceptionForHR(hr);

            hr = m_mediaCtrl.Run();
            DsError.ThrowExceptionForHR(hr);
        }
Example #12
        /// <summary>
        /// Stops playing the video
        /// </summary>
        public void Stop()
        {
            // Stop the FilterGraph
            m_mediaControl.Stop();

            // Reset the current position
            m_mediaSeeking.SetPositions(new DsOptInt64(0), SeekingFlags.AbsolutePositioning, new DsOptInt64(0), SeekingFlags.NoPositioning);

            // Update VideoState
            currentState = VideoState.Stopped;
        }
Example #13
 public virtual HRESULT StepForward()
 {
     if (m_FrameStep != null)
     {
         if (!IsPaused)
         {
             Pause();
         }
         int hr = m_FrameStep.Step(1, null);
         if (hr < 0)
         {
             hr = m_MediaSeeking.GetCurrentPosition(out long _time);
             DsLong _stop = (long)0;
             var    _ts   = new TimeSpan(0, 0, 1);
             _time += _ts.Ticks / 20;
             DsLong _current = _time;
             hr = m_MediaSeeking.SetPositions(_current, AMSeekingSeekingFlags.AbsolutePositioning, _stop, AMSeekingSeekingFlags.NoPositioning);
         }
         return((HRESULT)hr);
     }
     return((HRESULT)E_POINTER);
 }
Example #14
        public void StopClip()
        {
            int    hr  = 0;
            DsLong pos = new DsLong(0);

            if ((mediaControl == null) || (mediaSeeking == null))
            {
                return;
            }

            // Stop and reset position to beginning
            if ((currentState == PlayState.Paused) || (currentState == PlayState.Running))
            {
                hr           = mediaControl.Stop();
                currentState = PlayState.Stopped;

                // Seek to the beginning
                hr = mediaSeeking.SetPositions(pos, AMSeekingSeekingFlags.AbsolutePositioning, null, AMSeekingSeekingFlags.NoPositioning);

                // Display the first frame to indicate the reset condition
                hr = mediaControl.Pause();
            }
        }
Example #15
        /// <summary>
        /// Test the SetPosition function
        /// </summary>
        void TestSetPosition()
        {
            int    hr;
            DsLong pc;

            pc = new DsLong(1);

            // Move the start forward by one, use null
            // for stop position
            hr = m_ims.SetPositions(
                pc,
                AMSeekingSeekingFlags.AbsolutePositioning,
                null,
                AMSeekingSeekingFlags.NoPositioning);
            Marshal.ThrowExceptionForHR(hr);

            // Try setting the stop position to 1, and null the start
            hr = m_ims.SetPositions(
                null,
                AMSeekingSeekingFlags.NoPositioning,
                pc,
                AMSeekingSeekingFlags.AbsolutePositioning);
            Marshal.ThrowExceptionForHR(hr);
        }
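To confirm what the two calls above actually changed, the current and stop positions can be read back with GetPositions. A short sketch, assuming the same m_ims field:

        // Sketch: read back current and stop positions after the SetPositions calls.
        void TestGetPositions()
        {
            long current, stop;
            int hr = m_ims.GetPositions(out current, out stop);
            Marshal.ThrowExceptionForHR(hr);
            // After the calls above, the start was moved to 1 and the stop set to 1.
        }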
Example #16
        /// <summary>
        /// Stops playing the video
        /// </summary>
        public void Stop()
        {
            // Stop the FilterGraph
            if (mc != null)
            {
                DsError.ThrowExceptionForHR(mc.Stop());
            }

            // Reset the current position
            if (ms != null)
            {
                DsError.ThrowExceptionForHR(ms.SetPositions(0, AMSeekingSeekingFlags.AbsolutePositioning, 0, AMSeekingSeekingFlags.NoPositioning));
            }

            // Update VideoState
            currentState = VideoState.Stopped;
        }
Example #17
    public int SetStopTime()
    {
        // for limits change of active clip: pass the stop time to graph

        IMediaSeeking pSeek = m_pGraph as IMediaSeeking;
        int           hr    = E_NOINTERFACE;

        if (pSeek != null)
        {
            hr = pSeek.SetPositions(
                null,
                AMSeekingSeekingFlags.NoPositioning,
                m_tStop,
                AMSeekingSeekingFlags.AbsolutePositioning);
        }
        return(hr);
    }
Example #18
        // Set the position of the graph to a specified TimeSpan
        protected void SeekGraphToTime(TimeSpan seekTime)
        {
            SendDebugMessage("Seeking graph to time...");
            int           hr;
            IMediaControl mc = (IMediaControl)currentFilterGraph;
            // Stop graph if not stopped
            FilterState fs;

            mc.GetState(50, out fs);
            if (fs != FilterState.Stopped)
            {
                mc.Stop();
            }

            long   timeInSeconds          = (long)seekTime.TotalSeconds;
            DsLong dsTimeIn100NanoSeconds = DsLong.FromInt64(timeInSeconds * 10000000);

            SendDebugMessage("Setting position to " + dsTimeIn100NanoSeconds.ToInt64().ToString());
            long pos;

            if (UsingSBEFilter)
            {
                // IStreamBufferMediaSeeking is used directly on the source filter   http://msdn.microsoft.com/en-us/library/dd694950(v=vs.85).aspx
                IStreamBufferMediaSeeking mSeek = (IStreamBufferMediaSeeking)currentSBEfilter;
                hr = mSeek.SetPositions(dsTimeIn100NanoSeconds, AMSeekingSeekingFlags.AbsolutePositioning, 0, AMSeekingSeekingFlags.NoPositioning);
                DsError.ThrowExceptionForHR(hr);
                mSeek.GetCurrentPosition(out pos);
            }
            else
            {
                // IMediaSeeking is used on the filter graph which distributes the calls
                IMediaSeeking mSeek = (IMediaSeeking)currentFilterGraph;
                hr = mSeek.SetPositions(dsTimeIn100NanoSeconds, AMSeekingSeekingFlags.AbsolutePositioning, 0, AMSeekingSeekingFlags.NoPositioning);
                DsError.ThrowExceptionForHR(hr);
                mSeek.GetCurrentPosition(out pos);
            }

            SendDebugMessage("New pos is " + pos.ToString());
        }
Example #19
        private void seekbar_Scroll(object sender, EventArgs e)
        {
            // Update the position continuously.
            if (_mediaSeek != null)
            {
                long temp = ONE_MSEC * (long)seekbar.Value;
                try
                {
                    if (!mvs.LocalMedia[0].IsDVD)
                    {
                        //                    btnStop_Click(null, null);
                        int hr = _mediaSeek.SetPositions(temp, AMSeekingSeekingFlags.AbsolutePositioning, null, AMSeekingSeekingFlags.NoPositioning);
                        DsError.ThrowExceptionForHR(hr);
                        return;
                    }
//                    if (_IsScrolling) return;
//                    DvdHMSFTimeCode t5 = ConvertToDvdHMSFTimeCode(TimeSpan.FromMilliseconds(seekbar.Value));

//                    IDvdCmd _cmdOption;

//                    TimeSpan t3 = TimeSpan.Parse(mvs.OffsetTime);
//                    t3 = t3.Add(TimeSpan.FromMilliseconds(seekbar.Value));

//                    DvdHMSFTimeCode t1 = ConvertToDvdHMSFTimeCode(TimeSpan.Parse(mvs.OffsetTime));
//                    DvdHMSFTimeCode t2 = ConvertToDvdHMSFTimeCode(t3);

//                    if (state == FilterState.Stopped) hr = _dvdCtrl.PlayPeriodInTitleAutoStop(1, t1, t2, DvdCmdFlags.None, out _cmdOption);
//                    _IsScrolling = true;
                    RunGraph();
//                    _IsScrolling = false;
                    //                    int hr1 = _dvdCtrl.PlayAtTime(t2, DvdCmdFlags.Flush, out _cmdOption);
                    //                    DsError.ThrowExceptionForHR(hr1);

                    label4.Text = seekbar.Value.ToString();
                }
                catch (Exception ex)
                {
                    logger.ErrorException("Error in seeking : \r\n\r\n", ex);
                }
            }
        }
Example #20
        /// <summary>
        /// Queries the current video source for its capabilities regarding seeking and time info.
        /// The graph should be fully constructed for accurate information
        /// </summary>
        protected void QuerySeekingCapabilities()
        {
            try
            {
                _mediaSeeking.SetTimeFormat(TimeFormat.MediaTime);
                //get capabilities from the graph, and see what it supports that interests us
                AMSeekingSeekingCapabilities caps;
                int    r       = _mediaSeeking.GetCapabilities(out caps);
                long   lTest   = 0;
                double dblTest = 0;
                if (r != 0)
                {
                    _seek_canGetCurrentPos = false;
                    _seek_canSeek          = false;
                    _seek_canGetDuration   = false;
                }
                else    //if we were able to read the capabilities, then determine if the capability works, both by checking the
                // advertisement, and actually trying it out.
                {
                    _seek_canSeek = ((caps & AMSeekingSeekingCapabilities.CanSeekAbsolute) == AMSeekingSeekingCapabilities.CanSeekAbsolute) &&
                                    (_mediaSeeking.SetPositions(0, AMSeekingSeekingFlags.AbsolutePositioning,
                                                                null, AMSeekingSeekingFlags.NoPositioning) == 0);

                    _seek_canGetDuration = ((caps & AMSeekingSeekingCapabilities.CanGetDuration) == AMSeekingSeekingCapabilities.CanGetDuration) &&
                                           (_mediaSeeking.GetDuration(out lTest) == 0);

                    _seek_canGetCurrentPos = ((caps & AMSeekingSeekingCapabilities.CanGetCurrentPos) == AMSeekingSeekingCapabilities.CanGetCurrentPos) &&
                                             (_mediaSeeking.GetCurrentPosition(out lTest) == 0);
                }

                //check capabilities for the IMediaPosition interface
                _pos_canSeek          = (_mediaPosition.put_CurrentPosition(0) == 0);
                _pos_canGetDuration   = (_mediaPosition.get_Duration(out dblTest) == 0);
                _pos_canGetCurrentPos = (_mediaPosition.get_CurrentPosition(out dblTest) == 0);
            }
            catch (Exception)
            {
                _seek_canSeek = false;
                _pos_canSeek  = false;
            }
        }
Example #21
        public void SetPosition(long pos)
        {
            if (m_pControl == null || m_pSeek == null)
            {
                throw new COMException("pointers not set", E_Unexpected);
            }

            int hr;

            hr = m_pSeek.SetPositions(
                pos,
                AMSeekingSeekingFlags.AbsolutePositioning,
                null,
                AMSeekingSeekingFlags.NoPositioning);
            DsError.ThrowExceptionForHR(hr);

            if (m_ImageHandlers != null)
            {
                for (int x = 1; x < m_ImageHandlers.Length; x++)
                {
                    IMediaSeeking ims = m_pPins[x] as IMediaSeeking;

                    hr = ims.SetPositions(
                        pos,
                        AMSeekingSeekingFlags.AbsolutePositioning,
                        null,
                        AMSeekingSeekingFlags.NoPositioning);
                    DsError.ThrowExceptionForHR(hr);
                }
            }

            // If playback is stopped, we need to put the graph into the paused
            // state to update the video renderer with the new frame, and then stop
            // the graph again. The IMediaControl::StopWhenReady does this.
            if (m_state == PlaybackState.Stopped)
            {
                hr = m_pControl.StopWhenReady();
                DsError.ThrowExceptionForHR(hr);
            }
        }
Example #22
        /// <summary>
        /// Stop the media stream.
        /// </summary>
        public void Stop()
        {
            int hr = 0;

            // Stop and reset position to beginning
            if ((_currentState == PlayState.Paused) || (_currentState == PlayState.Running))
            {
                DsLong pos = new DsLong(0);

                hr = _mediaControl.Stop();
                DsError.ThrowExceptionForHR(hr);
                _currentState = PlayState.Stopped;

                // Seek to the beginning
                hr = _mediaSeeking.SetPositions(pos, AMSeekingSeekingFlags.AbsolutePositioning, null, AMSeekingSeekingFlags.NoPositioning);
                DsError.ThrowExceptionForHR(hr);

                // Display the first frame to indicate the reset condition
                hr = _mediaControl.Pause();
                DsError.ThrowExceptionForHR(hr);
            }
        }
Example #23
        // Seeking - Experimental, non-functional
        private void DoSeekToTime(TimeSpan seekTime)
        {
            int           hr;
            IMediaControl mc = (IMediaControl)currentFilterGraph;

            // Stop
            hr = mc.Stop();
            DsError.ThrowExceptionForHR(hr);
            // Stop ASFWriter
            hr = currentOutputFilter.Stop();
            DsError.ThrowExceptionForHR(hr);
            // Seek
            Int64  seekTimeNanoSeconds = Convert.ToInt64(seekTime.TotalSeconds * 10000000);
            DsLong dsTime = DsLong.FromInt64(seekTimeNanoSeconds);

            if (UsingSBEFilter)
            {
                IStreamBufferMediaSeeking mSeek = (IStreamBufferMediaSeeking)currentSBEfilter;  // StreamBufferMediaSeeking is used on the Source Filter, NOT the graph - see MSDN
                hr = mSeek.SetPositions(dsTime, AMSeekingSeekingFlags.AbsolutePositioning, 0, AMSeekingSeekingFlags.NoPositioning);
                DsError.ThrowExceptionForHR(hr);
            }
            else
            {
                // IMediaSeeking is used on the filter graph which distributes the calls
                IMediaSeeking mSeek = (IMediaSeeking)currentFilterGraph;
                hr = mSeek.SetPositions(dsTime, AMSeekingSeekingFlags.AbsolutePositioning, 0, AMSeekingSeekingFlags.NoPositioning);
                DsError.ThrowExceptionForHR(hr);
            }


            // Start ASF
            hr = currentOutputFilter.Run(0);
            DsError.ThrowExceptionForHR(hr);
            // Run again
            hr = mc.Run();
            DsError.ThrowExceptionForHR(hr);
        }
Example #24
        /// <summary>
        /// Stops playing the video
        /// </summary>
        public void Stop()
        {
            // End Threads
            if (updateThread != null)
            {
                updateThread.Abort();
            }
            updateThread = null;

            if (waitThread != null)
            {
                waitThread.Abort();
            }
            waitThread = null;

            // Stop the FilterGraph
            m_mediaControl.Stop();

            // Reset the current position
            m_mediaSeeking.SetPositions(new DsOptInt64(0), SeekingFlags.AbsolutePositioning, new DsOptInt64(0), SeekingFlags.NoPositioning);

            // Update VideoState
            currentState = VideoState.Stopped;
        }
Example #25
        /// <summary>
        /// Stops playing the video
        /// </summary>
        public void Stop()
        {
            // End Threads
            if (updateThread != null)
            {
                updateThread.Abort();
            }
            updateThread = null;

            if (waitThread != null)
            {
                waitThread.Abort();
            }
            waitThread = null;

            // Stop the FilterGraph
            DsError.ThrowExceptionForHR(mc.Stop());

            // Reset the current position
            DsError.ThrowExceptionForHR(ms.SetPositions(0, AMSeekingSeekingFlags.AbsolutePositioning, 0, AMSeekingSeekingFlags.NoPositioning));

            // Update VideoState
            currentState = VideoState.Stopped;
        }
Example #26
        public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                              MediaPortal.Core.Transcoding.Quality quality, Standard standard)
        {
            if (!Supports(format))
            {
                return(false);
            }
            string ext = System.IO.Path.GetExtension(info.file);

            if (ext.ToLower() != ".ts" && ext.ToLower() != ".mpg")
            {
                Log.Info("TSReader2MP4: wrong file format");
                return(false);
            }
            try
            {
                graphBuilder = (IGraphBuilder) new FilterGraph();
                _rotEntry    = new DsROTEntry((IFilterGraph)graphBuilder);
                Log.Info("TSReader2MP4: add filesource");
                TsReader reader = new TsReader();
                tsreaderSource = (IBaseFilter)reader;
                IBaseFilter filter = (IBaseFilter)tsreaderSource;
                graphBuilder.AddFilter(filter, "TSReader Source");
                IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
                Log.Info("TSReader2MP4: load file:{0}", info.file);
                int hr = fileSource.Load(info.file, null);
                //add audio/video codecs
                string strVideoCodec     = "";
                string strH264VideoCodec = "";
                string strAudioCodec     = "";
                string strAACAudioCodec  = "";
                using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
                {
                    strVideoCodec     = xmlreader.GetValueAsString("mytv", "videocodec", "");
                    strAudioCodec     = xmlreader.GetValueAsString("mytv", "audiocodec", "");
                    strAACAudioCodec  = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
                    strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
                }
                //Find the type of decoder required for the output video & audio pins on TSReader.
                Log.Info("TSReader2MP4: find tsreader compatible audio/video decoders");
                IPin pinOut0, pinOut1;
                IPin pinIn0, pinIn1;
                pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
                pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
                if (pinOut0 == null || pinOut1 == null)
                {
                    Log.Error("TSReader2MP4: FAILED: unable to get output pins of tsreader");
                    Cleanup();
                    return(false);
                }
                bool            usingAAC = false;
                IEnumMediaTypes enumMediaTypes;
                hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
                while (true)
                {
                    AMMediaType[] mediaTypes = new AMMediaType[1];
                    int           typesFetched;
                    hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
                    if (hr != 0 || typesFetched == 0)
                    {
                        break;
                    }
                    if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
                    {
                        Log.Info("TSReader2MP4: found LATM AAC audio out pin on tsreader");
                        usingAAC = true;
                    }
                }
                bool usingH264 = false;
                hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
                while (true)
                {
                    AMMediaType[] mediaTypes = new AMMediaType[1];
                    int           typesFetched;
                    hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
                    if (hr != 0 || typesFetched == 0)
                    {
                        break;
                    }
                    if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
                    {
                        Log.Info("TSReader2MP4: found H.264 video out pin on tsreader");
                        usingH264 = true;
                    }
                }
                //Add the type of decoder required for the output video & audio pins on TSReader.
                Log.Info("TSReader2MP4: add audio/video decoders to graph");
                if (usingH264 == false)
                {
                    Log.Info("TSReader2MP4: add mpeg2 video decoder:{0}", strVideoCodec);
                    VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
                    if (VideoCodec == null)
                    {
                        Log.Error("TSReader2MP4: unable to add mpeg2 video decoder");
                        Cleanup();
                        return(false);
                    }
                }
                else
                {
                    Log.Info("TSReader2MP4: add h264 video codec:{0}", strH264VideoCodec);
                    VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
                    if (VideoCodec == null)
                    {
                        Log.Error("TSReader2MP4: FAILED:unable to add h264 video codec");
                        Cleanup();
                        return(false);
                    }
                }
                if (usingAAC == false)
                {
                    Log.Info("TSReader2MP4: add mpeg2 audio codec:{0}", strAudioCodec);
                    AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
                    if (AudioCodec == null)
                    {
                        Log.Error("TSReader2MP4: FAILED:unable to add mpeg2 audio codec");
                        Cleanup();
                        return(false);
                    }
                }
                else
                {
                    Log.Info("TSReader2MP4: add aac audio codec:{0}", strAACAudioCodec);
                    AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
                    if (AudioCodec == null)
                    {
                        Log.Error("TSReader2MP4: FAILED:unable to add aac audio codec");
                        Cleanup();
                        return(false);
                    }
                }
                Log.Info("TSReader2MP4: connect tsreader->audio/video decoders");
                //connect output #0 (audio) of tsreader->audio decoder input pin 0
                //connect output #1 (video) of tsreader->video decoder input pin 0
                pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
                pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
                if (pinIn0 == null || pinIn1 == null)
                {
                    Log.Error("TSReader2MP4: FAILED: unable to get pins of video/audio codecs");
                    Cleanup();
                    return(false);
                }
                hr = graphBuilder.Connect(pinOut0, pinIn0);
                if (hr != 0)
                {
                    Log.Error("TSReader2MP4: FAILED: unable to connect audio pins :0x{0:X}", hr);
                    Cleanup();
                    return(false);
                }
                hr = graphBuilder.Connect(pinOut1, pinIn1);
                if (hr != 0)
                {
                    Log.Error("TSReader2MP4: FAILED: unable to connect video pins :0x{0:X}", hr);
                    Cleanup();
                    return(false);
                }
                //add encoders, muxer & filewriter
                if (!AddCodecs(graphBuilder, info))
                {
                    return(false);
                }
                //setup graph controls
                mediaControl = graphBuilder as IMediaControl;
                mediaSeeking = tsreaderSource as IMediaSeeking;
                mediaEvt     = graphBuilder as IMediaEventEx;
                mediaPos     = graphBuilder as IMediaPosition;
                //get file duration
                Log.Info("TSReader2MP4: Get duration of recording");
                long lTime = 5 * 60 * 60;
                lTime *= 10000000;
                long pStop = 0;
                hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                               AMSeekingSeekingFlags.NoPositioning);
                if (hr == 0)
                {
                    long lStreamPos;
                    mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
                    m_dDuration = lStreamPos;
                    lTime       = 0;
                    mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                              AMSeekingSeekingFlags.NoPositioning);
                }
                double duration = m_dDuration / 10000000d;
                Log.Info("TSReader2MP4: recording duration: {0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));
                //run the graph to initialize the filters to be sure
                hr = mediaControl.Run();
                if (hr != 0)
                {
                    Log.Error("TSReader2MP4: FAILED: unable to start graph :0x{0:X}", hr);
                    Cleanup();
                    return(false);
                }
                int maxCount = 20;
                while (true)
                {
                    long lCurrent;
                    mediaSeeking.GetCurrentPosition(out lCurrent);
                    double dpos = (double)lCurrent;
                    dpos /= 10000000d;
                    System.Threading.Thread.Sleep(100);
                    if (dpos >= 2.0d)
                    {
                        break;
                    }
                    maxCount--;
                    if (maxCount <= 0)
                    {
                        break;
                    }
                }
                mediaControl.Stop();
                FilterState state;
                mediaControl.GetState(500, out state);
                GC.Collect();
                GC.Collect();
                GC.Collect();
                GC.WaitForPendingFinalizers();
                graphBuilder.RemoveFilter(mp4Muxer);
                graphBuilder.RemoveFilter(h264Encoder);
                graphBuilder.RemoveFilter(aacEncoder);
                graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
                if (!AddCodecs(graphBuilder, info))
                {
                    return(false);
                }
                //Set Encoder quality & Muxer settings
                if (!EncoderSet(graphBuilder, info))
                {
                    return(false);
                }
                //start transcoding - run the graph
                Log.Info("TSReader2MP4: start transcoding");
                //setup flow control
                //need to leverage CBAsePin, CPullPin & IAsyncReader methods.
                IAsyncReader synchVideo = null;
                mediaSample = VideoCodec as IMediaSample;
                // synchVideo is never assigned here; guard the call to avoid a NullReferenceException.
                if (synchVideo != null && mediaSample != null)
                {
                    hr = synchVideo.SyncReadAligned(mediaSample);
                }
                //So we only parse decoder output when the encoders are ready.
                hr = mediaControl.Run();
                if (hr != 0)
                {
                    Log.Error("TSReader2MP4: FAILED:unable to start graph :0x{0:X}", hr);
                    Cleanup();
                    return(false);
                }
            }
            catch (Exception ex)
            {
                Log.Error("TSReader2MP4: Unable create graph: {0}", ex.Message);
                Cleanup();
                return(false);
            }
            return(true);
        }
Example #27
        /// <summary>
        /// Worker thread.
        /// </summary>
        ///
        private void WorkerThread()
        {
            ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;

            // grabber
            Grabber grabber = new Grabber(this);

            // objects
            object graphObject   = null;
            object grabberObject = null;

            // interfaces
            IGraphBuilder  graph         = null;
            IBaseFilter    sourceBase    = null;
            IBaseFilter    grabberBase   = null;
            ISampleGrabber sampleGrabber = null;
            IMediaControl  mediaControl  = null;

            IMediaEventEx mediaEvent   = null;
            IMediaSeeking mediaSeeking = null;

            try
            {
                // get type for filter graph
                Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating filter graph");
                }

                // create filter graph
                graphObject = Activator.CreateInstance(type);
                graph       = (IGraphBuilder)graphObject;

                // create source device's object
                graph.AddSourceFilter(_fileName, "source", out sourceBase);
                if (sourceBase == null)
                {
                    throw new ApplicationException("Failed creating source filter");
                }

                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null)
                {
                    throw new ApplicationException("Failed creating sample grabber");
                }

                // create sample grabber
                grabberObject = Activator.CreateInstance(type);
                sampleGrabber = (ISampleGrabber)grabberObject;
                grabberBase   = (IBaseFilter)grabberObject;

                // add grabber filters to graph
                graph.AddFilter(grabberBase, "grabber");

                // set media type
                AMMediaType mediaType = new AMMediaType
                {
                    MajorType = MediaType.Video,
                    SubType   = MediaSubType.RGB24
                };
                sampleGrabber.SetMediaType(mediaType);

                // connect pins
                int pinToTry = 0;

                IPin inPin  = Tools.GetInPin(grabberBase, 0);
                IPin outPin = null;

                // find output pin acceptable by sample grabber
                while (true)
                {
                    outPin = Tools.GetOutPin(sourceBase, pinToTry);

                    if (outPin == null)
                    {
                        Marshal.ReleaseComObject(inPin);
                        throw new ApplicationException("Did not find acceptable output video pin in the given source");
                    }

                    if (graph.Connect(outPin, inPin) < 0)
                    {
                        Marshal.ReleaseComObject(outPin);
                        outPin = null;
                        pinToTry++;
                    }
                    else
                    {
                        break;
                    }
                }

                Marshal.ReleaseComObject(outPin);
                Marshal.ReleaseComObject(inPin);

                // get media type
                if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));

                    grabber.Width  = vih.BmiHeader.Width;
                    grabber.Height = vih.BmiHeader.Height;
                    mediaType.Dispose();
                }

                // let's do rendering, if we don't need to prevent freezing
                if (!_preventFreezing)
                {
                    // render pin
                    graph.Render(Tools.GetOutPin(grabberBase, 0));

                    // configure video window
                    IVideoWindow window = (IVideoWindow)graphObject;
                    window.put_AutoShow(false);
                    window = null;
                }

                // configure sample grabber
                sampleGrabber.SetBufferSamples(false);
                sampleGrabber.SetOneShot(false);
                sampleGrabber.SetCallback(grabber, 1);

                // disable clock, if someone requested it
                if (!_referenceClockEnabled)
                {
                    IMediaFilter mediaFilter = (IMediaFilter)graphObject;
                    mediaFilter.SetSyncSource(null);
                }

                // get media control
                mediaControl = (IMediaControl)graphObject;

                // get media events' interface
                mediaEvent = (IMediaEventEx)graphObject;

                // Get media seeking & check seeking capability
                mediaSeeking = (IMediaSeeking)graphObject;
                mediaSeeking.GetDuration(out _duration);
                _onVideoLoad(_duration);
                const SeekingCapabilities caps = SeekingCapabilities.CanSeekAbsolute | SeekingCapabilities.CanGetDuration;
                SeekingCapabilities       canSeekCap;
                int hr = mediaSeeking.GetCapabilities(out canSeekCap);
                if (hr < 0)
                {
                    throw new ApplicationException("Failed getting seeking capabilities");
                }
                _isSeekEnabled = (canSeekCap & caps) == caps;

                // run
                mediaControl.Run();
                IsPlaying = true;
                do
                {
                    // GetCurrentTime
                    if (_isGetCurrentTime)
                    {
                        mediaSeeking.GetCurrentPosition(out _currentGetTime);
                        _isGetCurrentTime = false;
                    }
                    if (IsSetPause)
                    {
                        mediaControl.Pause();
                        IsSetPause = false;
                        IsPlaying  = false;
                    }
                    if (IsSetPlay)
                    {
                        mediaControl.Run();
                        IsSetPlay = false;
                        IsPlaying = true;
                    }
                    // SetCurrentTime
                    if (_isSetCurrentTime)
                    {
                        long stop = 0;
                        mediaSeeking.SetPositions(ref _currentSetTime, SeekingFlags.AbsolutePositioning, ref stop,
                                                  SeekingFlags.NoPositioning);
                        _isSetCurrentTime = false;
                    }
                    IntPtr   p1;
                    IntPtr   p2;
                    DsEvCode code;
                    if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
                    {
                        mediaEvent.FreeEventParams(code, p1, p2);

                        if (code == DsEvCode.Complete)
                        {
                            reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                            break;
                        }
                    }
                } while (!_stopEvent.WaitOne(100, false));
                IsPlaying = false;
                mediaControl.Stop();
            }
            catch (Exception exception)
            {
                // provide information to clients
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
                }
            }
            finally
            {
                // release all objects
                graph         = null;
                grabberBase   = null;
                sampleGrabber = null;
                mediaControl  = null;
                mediaEvent    = null;
                mediaSeeking  = null;

                if (graphObject != null)
                {
                    Marshal.ReleaseComObject(graphObject);
                    graphObject = null;
                }
                if (sourceBase != null)
                {
                    Marshal.ReleaseComObject(sourceBase);
                    sourceBase = null;
                }
                if (grabberObject != null)
                {
                    Marshal.ReleaseComObject(grabberObject);
                    grabberObject = null;
                }
            }

            if (PlayingFinished != null)
            {
                PlayingFinished(this, reasonToStop);
            }
        }
Example #28
 public void Seek(int timeInMs)
 {
     DsError.ThrowExceptionForHR(seeker.SetPositions(new DsLong(timeInMs * 10000L), AMSeekingSeekingFlags.AbsolutePositioning, null, AMSeekingSeekingFlags.NoPositioning));
     DsError.ThrowExceptionForHR(control.GetState(1000, out state));
 }
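The seeker and control fields used above are not shown being created; in most of these samples both interfaces come from casting the filter graph object itself. A hedged sketch (the method and file name are assumptions):

 // Sketch: typical way the seeker/control fields used above are obtained. The same
 // COM object that implements IGraphBuilder also exposes IMediaSeeking and
 // IMediaControl, so plain casts suffice once the graph has been built.
 private void BuildGraph(string fileName)
 {
     IGraphBuilder graphBuilder = (IGraphBuilder)new FilterGraph();
     DsError.ThrowExceptionForHR(graphBuilder.RenderFile(fileName, null));
     seeker  = (IMediaSeeking)graphBuilder;
     control = (IMediaControl)graphBuilder;
 }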
Example #29
 public bool Transcode(TranscodeInfo info, VideoFormat format, Quality quality, Standard standard)
 {
     try
     {
         if (!Supports(format))
         {
             return(false);
         }
         string ext = System.IO.Path.GetExtension(info.file);
         if (ext.ToLowerInvariant() != ".ts" && ext.ToLowerInvariant() != ".mpg")
         {
             Log.Info("TSReader2WMV: wrong file format");
             return(false);
         }
         Log.Info("TSReader2WMV: create graph");
         graphBuilder = (IGraphBuilder) new FilterGraph();
         _rotEntry    = new DsROTEntry((IFilterGraph)graphBuilder);
         Log.Info("TSReader2WMV: add filesource");
         TsReader reader = new TsReader();
         tsreaderSource = (IBaseFilter)reader;
         //ITSReader ireader = (ITSReader)reader;
         //ireader.SetTsReaderCallback(this);
         //ireader.SetRequestAudioChangeCallback(this);
         IBaseFilter filter = (IBaseFilter)tsreaderSource;
         graphBuilder.AddFilter(filter, "TSReader Source");
         IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
         Log.Info("TSReader2WMV: load file:{0}", info.file);
         int hr = fileSource.Load(info.file, null);
         //add audio/video codecs
         string strVideoCodec     = "";
         string strH264VideoCodec = "";
         string strAudioCodec     = "";
         string strAACAudioCodec  = "";
         using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
         {
             strVideoCodec     = xmlreader.GetValueAsString("mytv", "videocodec", "");
             strAudioCodec     = xmlreader.GetValueAsString("mytv", "audiocodec", "");
             strAACAudioCodec  = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
             strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
         }
         //Find the type of decoder required for the output video & audio pins on TSReader.
         Log.Info("TSReader2WMV: find tsreader compatible audio/video decoders");
         IPin pinOut0, pinOut1;
         IPin pinIn0, pinIn1;
         pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
         pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
         if (pinOut0 == null || pinOut1 == null)
         {
             Log.Error("TSReader2WMV: FAILED: unable to get output pins of tsreader");
             Cleanup();
             return(false);
         }
         bool            usingAAC = false;
         IEnumMediaTypes enumMediaTypes;
         hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
         while (true)
         {
             AMMediaType[] mediaTypes = new AMMediaType[1];
             int           typesFetched;
             hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
             if (hr != 0 || typesFetched == 0)
             {
                 break;
             }
             if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
             {
                 Log.Info("TSReader2WMV: found LATM AAC audio out pin on tsreader");
                 usingAAC = true;
             }
         }
         bool usingH264 = false;
         hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
         while (true)
         {
             AMMediaType[] mediaTypes = new AMMediaType[1];
             int           typesFetched;
             hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
             if (hr != 0 || typesFetched == 0)
             {
                 break;
             }
             if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
             {
                 Log.Info("TSReader2WMV: found H.264 video out pin on tsreader");
                 usingH264 = true;
             }
         }
         //Add the type of decoder required for the output video & audio pins on TSReader.
         Log.Info("TSReader2WMV: add audio/video decoders to graph");
         if (usingH264 == false)
         {
             Log.Info("TSReader2WMV: add mpeg2 video decoder:{0}", strVideoCodec);
             VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
             if (VideoCodec == null)
             {
                 Log.Error("TSReader2WMV: unable to add mpeg2 video decoder");
                 Cleanup();
                 return(false);
             }
         }
         else
         {
             Log.Info("TSReader2WMV: add h264 video codec:{0}", strH264VideoCodec);
             VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
             if (VideoCodec == null)
             {
                 Log.Error("TSReader2WMV: FAILED:unable to add h264 video codec");
                 Cleanup();
                 return(false);
             }
         }
         if (usingAAC == false)
         {
             Log.Info("TSReader2WMV: add mpeg2 audio codec:{0}", strAudioCodec);
             AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
             if (AudioCodec == null)
             {
                 Log.Error("TSReader2WMV: FAILED:unable to add mpeg2 audio codec");
                 Cleanup();
                 return(false);
             }
         }
         else
         {
             Log.Info("TSReader2WMV: add aac audio codec:{0}", strAACAudioCodec);
             AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
             if (AudioCodec == null)
             {
                 Log.Error("TSReader2WMV: FAILED:unable to add aac audio codec");
                 Cleanup();
                 return(false);
             }
         }
         Log.Info("TSReader2WMV: connect tsreader->audio/video decoders");
         //connect output #0 (audio) of tsreader->audio decoder input pin 0
         //connect output #1 (video) of tsreader->video decoder input pin 0
         pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
         pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
         if (pinIn0 == null || pinIn1 == null)
         {
             Log.Error("TSReader2WMV: FAILED: unable to get pins of video/audio codecs");
             Cleanup();
             return(false);
         }
         hr = graphBuilder.Connect(pinOut0, pinIn0);
         if (hr != 0)
         {
             Log.Error("TSReader2WMV: FAILED: unable to connect audio pins :0x{0:X}", hr);
             Cleanup();
             return(false);
         }
         hr = graphBuilder.Connect(pinOut1, pinIn1);
         if (hr != 0)
         {
             Log.Error("TSReader2WMV: FAILED: unable to connect video pins :0x{0:X}", hr);
             Cleanup();
             return(false);
         }
         string outputFilename = System.IO.Path.ChangeExtension(info.file, ".wmv");
         if (!AddWmAsfWriter(outputFilename, quality, standard))
         {
             return(false);
         }
         Log.Info("TSReader2WMV: start pre-run");
         mediaControl = graphBuilder as IMediaControl;
         mediaSeeking = tsreaderSource as IMediaSeeking;
         mediaEvt     = graphBuilder as IMediaEventEx;
         mediaPos     = graphBuilder as IMediaPosition;
         //get file duration
         long lTime = 5 * 60 * 60;
         lTime *= 10000000;
         long pStop = 0;
         hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                        AMSeekingSeekingFlags.NoPositioning);
         if (hr == 0)
         {
             long lStreamPos;
             mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
             m_dDuration = lStreamPos;
             lTime       = 0;
             mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                       AMSeekingSeekingFlags.NoPositioning);
         }
         double duration = m_dDuration / 10000000d;
         Log.Info("TSReader2WMV: movie duration:{0}", Util.Utils.SecondsToHMSString((int)duration));
         hr = mediaControl.Run();
         if (hr != 0)
         {
             Log.Error("TSReader2WMV: FAILED: unable to start graph :0x{0:X}", hr);
             Cleanup();
             return(false);
         }
         int maxCount = 20;
         while (true)
         {
             long lCurrent;
             mediaSeeking.GetCurrentPosition(out lCurrent);
             double dpos = (double)lCurrent;
             dpos /= 10000000d;
             System.Threading.Thread.Sleep(100);
             if (dpos >= 2.0d)
             {
                 break;
             }
             maxCount--;
             if (maxCount <= 0)
             {
                 break;
             }
         }
         Log.Info("TSReader2WMV: pre-run done");
         Log.Info("TSReader2WMV: Get duration of movie");
         mediaControl.Stop();
         FilterState state;
         mediaControl.GetState(500, out state);
         GC.Collect();
         GC.Collect();
         GC.Collect();
         GC.WaitForPendingFinalizers();
         Log.Info("TSReader2WMV: reconnect mpeg2 video codec->ASF WM Writer");
         graphBuilder.RemoveFilter(fileWriterbase);
         if (!AddWmAsfWriter(outputFilename, quality, standard))
         {
             return(false);
         }
         Log.Info("TSReader2WMV: Start transcoding");
         hr = mediaControl.Run();
         if (hr != 0)
         {
             Log.Error("TSReader2WMV:FAILED:unable to start graph :0x{0:X}", hr);
             Cleanup();
             return(false);
         }
     }
     catch (Exception e)
     {
         // TODO: Handle exceptions.
         Log.Error("unable to transcode file:{0} message:{1}", info.file, e.Message);
         return(false);
     }
     return(true);
 }
Example #30
        public override int Enable(int lIndex, AMStreamSelectEnableFlags dwFlags)
        {
            bool changed        = false;
            uint oldAudioStream = _streamParser.InputStream.AudioStream.StreamId;

            for (int index = 0; index < _streamParser.SelectableTracks.Count; index++)
            {
                var track = _streamParser.SelectableTracks[index];

                bool isEnabled = (
                    index == lIndex && dwFlags == AMStreamSelectEnableFlags.Enable || // the current index should be enabled
                    dwFlags == AMStreamSelectEnableFlags.EnableAll                    // all should be enabled
                    ) && dwFlags != AMStreamSelectEnableFlags.DisableAll;             // must not be "Disable All"

                changed |= _streamParser.InputStream.EnableStream((int)track.StreamId, isEnabled);
            }
            uint newAudioStream = _streamParser.InputStream.AudioStream.StreamId;

            if (!changed)
            {
                return(NOERROR);
            }

            // Update output pin
            var audioPin = Pins.OfType <SplitterOutputPin>().FirstOrDefault(p => p.Track.Type == DemuxTrack.TrackType.Audio);

            if (audioPin != null)
            {
                AMMediaType mt;
                if (MediaTypeBuilder.TryGetType(_streamParser.InputStream.AudioStream, out mt))
                {
                    _streamParser.Tracks[1].SetStreamMediaType(mt);
                }
                var res = RenameOutputPin(audioPin, oldAudioStream, newAudioStream);
            }

            if (IsActive && dwFlags != AMStreamSelectEnableFlags.DisableAll)
            {
                try
                {
                    IMediaSeeking seeking = (IMediaSeeking)FilterGraph;
                    if (seeking != null)
                    {
                        long current;
                        seeking.GetCurrentPosition(out current);
                        // Only seek during playback, not on initial selection
                        if (current != 0)
                        {
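                            // Nudge the position 1/10 s back and then restore it so the
                            // graph re-delivers samples from the newly selected audio stream.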
                            current -= UNITS / 10;
                            seeking.SetPositions(current, AMSeekingSeekingFlags.AbsolutePositioning, null, AMSeekingSeekingFlags.NoPositioning);
                            current += UNITS / 10;
                            seeking.SetPositions(current, AMSeekingSeekingFlags.AbsolutePositioning, null, AMSeekingSeekingFlags.NoPositioning);
                        }
                    }
                }
                catch
                {
                }
            }
            return(NOERROR);
        }
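The seek at the end of Enable is the interesting detail: after the audio stream is switched, the filter nudges the position a tenth of a second back and then returns to where it was, which makes the graph deliver fresh samples from the newly selected stream without a noticeable jump. A stand-alone sketch of that nudge (hypothetical helper name; DsLong's implicit conversion from long is assumed, as in the calls above):

static void NudgeCurrentPosition(IMediaSeeking seeking)
{
    const long UNITS = 10000000;   // 100 ns units per second
    long current;
    if (seeking == null || seeking.GetCurrentPosition(out current) != 0 || current == 0)
    {
        return;   // nothing to do on initial selection or when no position is available
    }
    // Step 1/10 s back, then restore the original position.
    seeking.SetPositions(current - UNITS / 10, AMSeekingSeekingFlags.AbsolutePositioning,
                         null, AMSeekingSeekingFlags.NoPositioning);
    seeking.SetPositions(current, AMSeekingSeekingFlags.AbsolutePositioning,
                         null, AMSeekingSeekingFlags.NoPositioning);
}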
Example #31
0
        private static Bitmap GetBitmap(IGraphBuilder graph, ISampleGrabber sg, long grabPosition, out EventCode ec)
        {
            IntPtr pBuffer     = IntPtr.Zero;
            int    pBufferSize = 0;
            Bitmap b           = null;
            int    hr          = 0;

            try
            {
                IMediaSeeking ims = graph as IMediaSeeking;

                bool canDuration = false;
                bool canPos      = false;
                bool canSeek     = false;
                long pDuration   = 0;
                long pCurrent    = 0;

                if (ims != null)
                {
                    AMSeekingSeekingCapabilities caps;

                    hr = ims.GetCapabilities(out caps);
                    if ((caps & AMSeekingSeekingCapabilities.CanGetDuration) == AMSeekingSeekingCapabilities.CanGetDuration)
                    {
                        canDuration = true;
                    }
                    if ((caps & AMSeekingSeekingCapabilities.CanGetCurrentPos) == AMSeekingSeekingCapabilities.CanGetCurrentPos)
                    {
                        canPos = true;
                    }
                    if ((caps & AMSeekingSeekingCapabilities.CanSeekAbsolute) == AMSeekingSeekingCapabilities.CanSeekAbsolute)
                    {
                        canSeek = true;
                    }

                    if (canDuration)
                    {
                        hr = ims.GetDuration(out pDuration);
                    }

                    if (grabPosition > pDuration)
                    {
                        grabPosition = pDuration - 1;
                    }

                    if (canSeek)
                    {
                        hr = ims.SetPositions(new DsLong(grabPosition), AMSeekingSeekingFlags.AbsolutePositioning, 0, AMSeekingSeekingFlags.NoPositioning);
                        DsError.ThrowExceptionForHR(hr);
                    }

                    if (canPos)
                    {
                        hr = ims.GetCurrentPosition(out pCurrent);
                    }
                }

                IMediaControl mControl = graph as IMediaControl;
                IMediaEvent   mEvent   = graph as IMediaEvent;

                //ec = EventCode.SystemBase;

                hr = mControl.Pause();
                DsError.ThrowExceptionForHR(hr);

                hr = mControl.Run();
                DsError.ThrowExceptionForHR(hr);

                hr = mEvent.WaitForCompletion(int.MaxValue, out ec);
                DsError.ThrowExceptionForHR(hr);

                hr = mControl.Pause();
                DsError.ThrowExceptionForHR(hr);

                hr = mControl.Stop();
                DsError.ThrowExceptionForHR(hr);

                if (ec != EventCode.Complete)
                {
                    return(null);
                }

                // First call with a null buffer only asks for the required size.
                hr = sg.GetCurrentBuffer(ref pBufferSize, pBuffer);
                DsError.ThrowExceptionForHR(hr);

                // Allocate that much unmanaged memory, then call again to copy
                // the grabbed frame into the buffer.
                pBuffer = Marshal.AllocCoTaskMem(pBufferSize);

                hr = sg.GetCurrentBuffer(ref pBufferSize, pBuffer);
                DsError.ThrowExceptionForHR(hr);

                if (pBuffer != IntPtr.Zero)
                {
                    AMMediaType sgMt        = new AMMediaType();
                    int         videoWidth  = 0;
                    int         videoHeight = 0;
                    int         stride      = 0;

                    try
                    {
                        hr = sg.GetConnectedMediaType(sgMt);
                        DsError.ThrowExceptionForHR(hr);

                        if (sgMt.formatPtr != IntPtr.Zero)
                        {
                            if (sgMt.formatType == FormatType.VideoInfo)
                            {
                                VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(sgMt.formatPtr, typeof(VideoInfoHeader));
                                videoWidth  = vih.BmiHeader.Width;
                                videoHeight = vih.BmiHeader.Height;
                                stride      = videoWidth * (vih.BmiHeader.BitCount / 8);
                            }
                            else
                            {
                                throw new ApplicationException("Unsupported Sample");
                            }

                            b = new Bitmap(videoWidth, videoHeight, stride, System.Drawing.Imaging.PixelFormat.Format32bppRgb, pBuffer);
                            b.RotateFlip(RotateFlipType.RotateNoneFlipY);
                        }
                    }
                    finally
                    {
                        DsUtils.FreeAMMediaType(sgMt);
                    }
                }

                return(b);
            }
            finally
            {
                if (pBuffer != IntPtr.Zero)
                {
                    Marshal.FreeCoTaskMem(pBuffer);
                }
            }
        }
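A possible calling side for a frame grabber like GetBitmap: configure DirectShowLib's SampleGrabber for 32 bit RGB before rendering the file, keep buffered samples, and request the frame at a given second. Helper name, the RGB32 choice, and the error handling are illustrative only; the sketch assumes it sits in the same class as GetBitmap and uses the same usings (Marshal is already used above):

static Bitmap GrabFrame(string fileName, long positionInSeconds)
{
    IGraphBuilder graph = (IGraphBuilder)new FilterGraph();
    ISampleGrabber sg = (ISampleGrabber)new SampleGrabber();
    try
    {
        // Force RGB32/VideoInfo before the file is rendered, so that
        // GetConnectedMediaType later matches what GetBitmap expects.
        AMMediaType mt = new AMMediaType();
        mt.majorType = MediaType.Video;
        mt.subType = MediaSubType.RGB32;
        mt.formatType = FormatType.VideoInfo;
        int hr = sg.SetMediaType(mt);
        DsError.ThrowExceptionForHR(hr);
        DsUtils.FreeAMMediaType(mt);

        hr = sg.SetBufferSamples(true);   // keep a copy of the last delivered sample
        DsError.ThrowExceptionForHR(hr);

        hr = graph.AddFilter((IBaseFilter)sg, "Sample Grabber");
        DsError.ThrowExceptionForHR(hr);
        hr = graph.RenderFile(fileName, null);
        DsError.ThrowExceptionForHR(hr);

        EventCode ec;
        return GetBitmap(graph, sg, positionInSeconds * 10000000, out ec);
    }
    finally
    {
        Marshal.ReleaseComObject(sg);
        Marshal.ReleaseComObject(graph);
    }
}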
Example #32
0
 public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                       MediaPortal.Core.Transcoding.Quality quality, Standard standard)
 {
   if (!Supports(format)) return false;
   string ext = System.IO.Path.GetExtension(info.file);
   if (ext.ToLowerInvariant() != ".ts" && ext.ToLowerInvariant() != ".mpg")
   {
     Log.Info("TSReader2MP4: wrong file format");
     return false;
   }
   try
   {
     graphBuilder = (IGraphBuilder)new FilterGraph();
     _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
     Log.Info("TSReader2MP4: add filesource");
     TsReader reader = new TsReader();
     tsreaderSource = (IBaseFilter)reader;
     IBaseFilter filter = (IBaseFilter)tsreaderSource;
     graphBuilder.AddFilter(filter, "TSReader Source");
     IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
     Log.Info("TSReader2MP4: load file:{0}", info.file);
     int hr = fileSource.Load(info.file, null);
     //add audio/video codecs
     string strVideoCodec = "";
     string strH264VideoCodec = "";
     string strAudioCodec = "";
     string strAACAudioCodec = "";
     using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
     {
       strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
       strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
       strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
       strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
     }
     //Find the type of decoder required for the output video & audio pins on TSReader.
     Log.Info("TSReader2MP4: find tsreader compatible audio/video decoders");
     IPin pinOut0, pinOut1;
     IPin pinIn0, pinIn1;
     pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
     pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
     if (pinOut0 == null || pinOut1 == null)
     {
       Log.Error("TSReader2MP4: FAILED: unable to get output pins of tsreader");
       Cleanup();
       return false;
     }
     bool usingAAC = false;
     IEnumMediaTypes enumMediaTypes;
     hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
     while (true)
     {
       AMMediaType[] mediaTypes = new AMMediaType[1];
       int typesFetched;
       hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
       if (hr != 0 || typesFetched == 0) break;
       if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
       {
         Log.Info("TSReader2MP4: found LATM AAC audio out pin on tsreader");
         usingAAC = true;
       }
     }
     bool usingH264 = false;
     hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
     while (true)
     {
       AMMediaType[] mediaTypes = new AMMediaType[1];
       int typesFetched;
       hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
       if (hr != 0 || typesFetched == 0) break;
       if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
       {
         Log.Info("TSReader2MP4: found H.264 video out pin on tsreader");
         usingH264 = true;
       }
     }
     //Add the type of decoder required for the output video & audio pins on TSReader.
     Log.Info("TSReader2MP4: add audio/video decoders to graph");
     if (usingH264 == false)
     {
       Log.Info("TSReader2MP4: add mpeg2 video decoder:{0}", strVideoCodec);
       VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
       if (VideoCodec == null)
       {
         Log.Error("TSReader2MP4: unable to add mpeg2 video decoder");
         Cleanup();
         return false;
       }
     }
     else
     {
       Log.Info("TSReader2MP4: add h264 video codec:{0}", strH264VideoCodec);
       VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
       if (VideoCodec == null)
       {
         Log.Error("TSReader2MP4: FAILED:unable to add h264 video codec");
         Cleanup();
         return false;
       }
     }
     if (usingAAC == false)
     {
       Log.Info("TSReader2MP4: add mpeg2 audio codec:{0}", strAudioCodec);
       AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
       if (AudioCodec == null)
       {
         Log.Error("TSReader2MP4: FAILED:unable to add mpeg2 audio codec");
         Cleanup();
         return false;
       }
     }
     else
     {
       Log.Info("TSReader2MP4: add aac audio codec:{0}", strAACAudioCodec);
       AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
       if (AudioCodec == null)
       {
         Log.Error("TSReader2MP4: FAILED:unable to add aac audio codec");
         Cleanup();
         return false;
       }
     }
     Log.Info("TSReader2MP4: connect tsreader->audio/video decoders");
     //connect output #0 (audio) of tsreader->audio decoder input pin 0
     //connect output #1 (video) of tsreader->video decoder input pin 0
     pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
     pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
     if (pinIn0 == null || pinIn1 == null)
     {
       Log.Error("TSReader2MP4: FAILED: unable to get pins of video/audio codecs");
       Cleanup();
       return false;
     }
     hr = graphBuilder.Connect(pinOut0, pinIn0);
     if (hr != 0)
     {
       Log.Error("TSReader2MP4: FAILED: unable to connect audio pins :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     hr = graphBuilder.Connect(pinOut1, pinIn1);
     if (hr != 0)
     {
       Log.Error("TSReader2MP4: FAILED: unable to connect video pins :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     //add encoders, muxer & filewriter
     if (!AddCodecs(graphBuilder, info)) return false;
     //setup graph controls
     mediaControl = graphBuilder as IMediaControl;
     mediaSeeking = tsreaderSource as IMediaSeeking;
     mediaEvt = graphBuilder as IMediaEventEx;
     mediaPos = graphBuilder as IMediaPosition;
     //get file duration
     Log.Info("TSReader2MP4: Get duration of recording");
     long lTime = 5 * 60 * 60;
     lTime *= 10000000;
     long pStop = 0;
     hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                    AMSeekingSeekingFlags.NoPositioning);
     if (hr == 0)
     {
       long lStreamPos;
       mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
       m_dDuration = lStreamPos;
       lTime = 0;
       mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                 AMSeekingSeekingFlags.NoPositioning);
     }
     double duration = m_dDuration / 10000000d;
     Log.Info("TSReader2MP4: recording duration: {0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));
     //run the graph to initialize the filters to be sure
     hr = mediaControl.Run();
     if (hr != 0)
     {
       Log.Error("TSReader2MP4: FAILED: unable to start graph :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     int maxCount = 20;
     while (true)
     {
       long lCurrent;
       mediaSeeking.GetCurrentPosition(out lCurrent);
       double dpos = (double)lCurrent;
       dpos /= 10000000d;
       System.Threading.Thread.Sleep(100);
       if (dpos >= 2.0d) break;
       maxCount--;
       if (maxCount <= 0) break;
     }
     mediaControl.Stop();
     FilterState state;
     mediaControl.GetState(500, out state);
     GC.Collect();
     GC.Collect();
     GC.Collect();
     GC.WaitForPendingFinalizers();
     graphBuilder.RemoveFilter(mp4Muxer);
     graphBuilder.RemoveFilter(h264Encoder);
     graphBuilder.RemoveFilter(aacEncoder);
     graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
     if (!AddCodecs(graphBuilder, info)) return false;
     //Set Encoder quality & Muxer settings
     if (!EncoderSet(graphBuilder, info)) return false;
     //start transcoding - run the graph
     Log.Info("TSReader2MP4: start transcoding");
      //setup flow control
      //TODO: leverage CBasePin, CPullPin & IAsyncReader (SyncReadAligned) so we
      //only parse decoder output when the encoders are ready.
     hr = mediaControl.Run();
     if (hr != 0)
     {
       Log.Error("TSReader2MP4: FAILED:unable to start graph :0x{0:X}", hr);
       Cleanup();
       return false;
     }
   }
   catch (Exception ex)
   {
     Log.Error("TSReader2MP4: Unable create graph: {0}", ex.Message);
     Cleanup();
     return false;
   }
   return true;
 }
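Both Transcode variants probe TsReader's output pins by enumerating their preferred media types to decide between the MPEG-2 and H.264 video decoders (and between MPEG and LATM AAC audio). That probing loop can be factored out; a sketch under the same assumptions as the examples (hypothetical helper name, same DirectShowLib usings, plus System.Runtime.InteropServices for Marshal; it also frees each enumerated AMMediaType with DsUtils.FreeAMMediaType):

static bool PinOffersSubType(IPin pin, Guid majorType, Guid subType)
{
    IEnumMediaTypes enumTypes;
    if (pin.EnumMediaTypes(out enumTypes) != 0 || enumTypes == null)
    {
        return false;
    }
    try
    {
        AMMediaType[] types = new AMMediaType[1];
        int fetched;
        while (enumTypes.Next(1, types, out fetched) == 0 && fetched == 1)
        {
            bool match = types[0].majorType == majorType && types[0].subType == subType;
            DsUtils.FreeAMMediaType(types[0]);   // release the format block of every enumerated type
            if (match)
            {
                return true;
            }
        }
        return false;
    }
    finally
    {
        Marshal.ReleaseComObject(enumTypes);
    }
}

With it, the two detection loops reduce to calls such as PinOffersSubType(pinOut0, MediaType.Audio, MediaSubType.LATMAAC) and PinOffersSubType(pinOut1, MediaType.Video, AVC1), where AVC1 is the GUID the class already defines.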
 public bool Transcode(TranscodeInfo info, VideoFormat format, Quality quality, Standard standard)
 {
   try
   {
     if (!Supports(format)) return false;
     string ext = System.IO.Path.GetExtension(info.file);
     if (ext.ToLower() != ".ts" && ext.ToLower() != ".mpg")
     {
       Log.Info("TSReader2WMV: wrong file format");
       return false;
     }
     Log.Info("TSReader2WMV: create graph");
     graphBuilder = (IGraphBuilder)new FilterGraph();
     _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);
     Log.Info("TSReader2WMV: add filesource");
     TsReader reader = new TsReader();
     tsreaderSource = (IBaseFilter)reader;
     //ITSReader ireader = (ITSReader)reader;
     //ireader.SetTsReaderCallback(this);
     //ireader.SetRequestAudioChangeCallback(this);
     IBaseFilter filter = (IBaseFilter)tsreaderSource;
     graphBuilder.AddFilter(filter, "TSReader Source");
     IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
     Log.Info("TSReader2WMV: load file:{0}", info.file);
     int hr = fileSource.Load(info.file, null);
     //add audio/video codecs
     string strVideoCodec = "";
     string strH264VideoCodec = "";
     string strAudioCodec = "";
     string strAACAudioCodec = "";
     using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
     {
       strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
       strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
       strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
       strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
     }
     //Find the type of decoder required for the output video & audio pins on TSReader.
     Log.Info("TSReader2WMV: find tsreader compatible audio/video decoders");
     IPin pinOut0, pinOut1;
     IPin pinIn0, pinIn1;
     pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); //audio
     pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); //video
     if (pinOut0 == null || pinOut1 == null)
     {
       Log.Error("TSReader2WMV: FAILED: unable to get output pins of tsreader");
       Cleanup();
       return false;
     }
     bool usingAAC = false;
     IEnumMediaTypes enumMediaTypes;
     hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
     while (true)
     {
       AMMediaType[] mediaTypes = new AMMediaType[1];
       int typesFetched;
       hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
       if (hr != 0 || typesFetched == 0) break;
       if (mediaTypes[0].majorType == MediaType.Audio && mediaTypes[0].subType == MediaSubType.LATMAAC)
       {
         Log.Info("TSReader2WMV: found LATM AAC audio out pin on tsreader");
         usingAAC = true;
       }
     }
     bool usingH264 = false;
     hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
     while (true)
     {
       AMMediaType[] mediaTypes = new AMMediaType[1];
       int typesFetched;
       hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
       if (hr != 0 || typesFetched == 0) break;
       if (mediaTypes[0].majorType == MediaType.Video && mediaTypes[0].subType == AVC1)
       {
         Log.Info("TSReader2WMV: found H.264 video out pin on tsreader");
         usingH264 = true;
       }
     }
     //Add the type of decoder required for the output video & audio pins on TSReader.
     Log.Info("TSReader2WMV: add audio/video decoders to graph");
     if (usingH264 == false)
     {
       Log.Info("TSReader2WMV: add mpeg2 video decoder:{0}", strVideoCodec);
       VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
       if (VideoCodec == null)
       {
         Log.Error("TSReader2WMV: unable to add mpeg2 video decoder");
         Cleanup();
         return false;
       }
     }
     else
     {
       Log.Info("TSReader2WMV: add h264 video codec:{0}", strH264VideoCodec);
       VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
       if (VideoCodec == null)
       {
         Log.Error("TSReader2WMV: FAILED:unable to add h264 video codec");
         Cleanup();
         return false;
       }
     }
     if (usingAAC == false)
     {
       Log.Info("TSReader2WMV: add mpeg2 audio codec:{0}", strAudioCodec);
       AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
       if (AudioCodec == null)
       {
         Log.Error("TSReader2WMV: FAILED:unable to add mpeg2 audio codec");
         Cleanup();
         return false;
       }
     }
     else
     {
       Log.Info("TSReader2WMV: add aac audio codec:{0}", strAACAudioCodec);
       AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
       if (AudioCodec == null)
       {
         Log.Error("TSReader2WMV: FAILED:unable to add aac audio codec");
         Cleanup();
         return false;
       }
     }
     Log.Info("TSReader2WMV: connect tsreader->audio/video decoders");
     //connect output #0 (audio) of tsreader->audio decoder input pin 0
     //connect output #1 (video) of tsreader->video decoder input pin 0
     pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); //audio
     pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); //video
     if (pinIn0 == null || pinIn1 == null)
     {
       Log.Error("TSReader2WMV: FAILED: unable to get pins of video/audio codecs");
       Cleanup();
       return false;
     }
     hr = graphBuilder.Connect(pinOut0, pinIn0);
     if (hr != 0)
     {
       Log.Error("TSReader2WMV: FAILED: unable to connect audio pins :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     hr = graphBuilder.Connect(pinOut1, pinIn1);
     if (hr != 0)
     {
       Log.Error("TSReader2WMV: FAILED: unable to connect video pins :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     string outputFilename = System.IO.Path.ChangeExtension(info.file, ".wmv");
     if (!AddWmAsfWriter(outputFilename, quality, standard)) return false;
     Log.Info("TSReader2WMV: start pre-run");
     mediaControl = graphBuilder as IMediaControl;
     mediaSeeking = tsreaderSource as IMediaSeeking;
     mediaEvt = graphBuilder as IMediaEventEx;
     mediaPos = graphBuilder as IMediaPosition;
     //get file duration
     long lTime = 5 * 60 * 60;
     lTime *= 10000000;
     long pStop = 0;
     hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                    AMSeekingSeekingFlags.NoPositioning);
     if (hr == 0)
     {
       long lStreamPos;
       mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
       m_dDuration = lStreamPos;
       lTime = 0;
       mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning, new DsLong(pStop),
                                 AMSeekingSeekingFlags.NoPositioning);
     }
     double duration = m_dDuration / 10000000d;
     Log.Info("TSReader2WMV: movie duration:{0}", Util.Utils.SecondsToHMSString((int)duration));
     hr = mediaControl.Run();
     if (hr != 0)
     {
       Log.Error("TSReader2WMV: FAILED: unable to start graph :0x{0:X}", hr);
       Cleanup();
       return false;
     }
     int maxCount = 20;
     while (true)
     {
       long lCurrent;
       mediaSeeking.GetCurrentPosition(out lCurrent);
       double dpos = (double)lCurrent;
       dpos /= 10000000d;
       System.Threading.Thread.Sleep(100);
       if (dpos >= 2.0d) break;
       maxCount--;
       if (maxCount <= 0) break;
     }
     Log.Info("TSReader2WMV: pre-run done");
     Log.Info("TSReader2WMV: Get duration of movie");
     mediaControl.Stop();
     FilterState state;
     mediaControl.GetState(500, out state);
     GC.Collect();
     GC.Collect();
     GC.Collect();
     GC.WaitForPendingFinalizers();
     Log.Info("TSReader2WMV: reconnect mpeg2 video codec->ASF WM Writer");
     graphBuilder.RemoveFilter(fileWriterbase);
     if (!AddWmAsfWriter(outputFilename, quality, standard)) return false;
     Log.Info("TSReader2WMV: Start transcoding");
     hr = mediaControl.Run();
     if (hr != 0)
     {
       Log.Error("TSReader2WMV:FAILED:unable to start graph :0x{0:X}", hr);
       Cleanup();
       return false;
     }
   }
   catch (Exception e)
   {
      Log.Error("TSReader2WMV: FAILED: unable to transcode file:{0} message:{1}", info.file, e.Message);
      Cleanup();
      return false;
   }
   return true;
 }
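All of these examples convert between seconds and the 100-nanosecond REFERENCE_TIME units that IMediaSeeking works in by multiplying or dividing by 10,000,000. A small hypothetical pair of helpers keeps that factor in one place:

const long UnitsPerSecond = 10000000L;   // 100 ns units per second

static long SecondsToReferenceTime(double seconds)
{
    return (long)(seconds * UnitsPerSecond);
}

static double ReferenceTimeToSeconds(long referenceTime)
{
    return referenceTime / (double)UnitsPerSecond;
}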