Example #1
        public SourceReaderFlags GetSample(out Sample sample)
        {
            SourceReaderFlags flags = SourceReaderFlags.None;

            sample = sourceReader.ReadSample(SourceReaderIndex.FirstVideoStream, SourceReaderControlFlags.None, out var actualIndex, out flags, out var timestamp);

            //if (sample != null)
            //{
            //	ProcessSample(sample);
            //}

            return(flags);
        }
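A caller could drive GetSample in a loop until the reader reports end of stream, roughly as follows. This is a sketch only; ProcessSample is the hypothetical per-sample handler suggested by the commented-out code above.

        SourceReaderFlags flags;
        do
        {
            flags = GetSample(out Sample sample);
            if (sample != null)
            {
                ProcessSample(sample);  // hypothetical handler, implied by the commented-out code
                sample.Dispose();       // the caller must release each returned sample
            }
        }
        while (!flags.HasFlag(SourceReaderFlags.Endofstream));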
Example #2
        /// <summary>
        /// GenerateNext is called by the Generator base class when the next sample should be read.
        /// </summary>
        /// <param name="previous">Time of previous sample.</param>
        /// <returns>Time for current sample.</returns>
        protected override DateTime GenerateNext(DateTime previous)
        {
            DateTime          originatingTime = default(DateTime);
            int               streamIndex     = 0;
            SourceReaderFlags flags           = SourceReaderFlags.None;
            long              timestamp       = 0;
            Sample            sample          = this.sourceReader.ReadSample(SourceReaderIndex.AnyStream, 0, out streamIndex, out flags, out timestamp);

            if (sample != null)
            {
                originatingTime = this.start + TimeSpan.FromTicks(timestamp);
                MediaBuffer buffer           = sample.ConvertToContiguousBuffer();
                int         currentByteCount = 0;
                int         maxByteCount     = 0;
                IntPtr      data             = buffer.Lock(out maxByteCount, out currentByteCount);

                if (streamIndex == this.imageStreamIndex)
                {
                    using (var sharedImage = ImagePool.GetOrCreate(this.videoWidth, this.videoHeight, Imaging.PixelFormat.BGR_24bpp))
                    {
                        sharedImage.Resource.CopyFrom(data);
                        this.Image.Post(sharedImage, originatingTime);
                    }
                }
                else if (streamIndex == this.audioStreamIndex)
                {
                    AudioBuffer audioBuffer = new AudioBuffer(currentByteCount, this.waveFormat);
                    Marshal.Copy(data, audioBuffer.Data, 0, currentByteCount);
                    this.Audio.Post(audioBuffer, originatingTime);
                }

                buffer.Unlock();
                buffer.Dispose();
                sample.Dispose();
            }

            if (flags == SourceReaderFlags.Endofstream)
            {
                return(DateTime.MaxValue); // Used to indicate there is no more data
            }

            if (originatingTime <= previous)
            {
                return(previous + TimeSpan.FromTicks(1)); // To enforce strictly increasing times for the generator
            }

            return(originatingTime);
        }
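The contract here is that GenerateNext returns the originating time of the sample just posted, a time strictly greater than the previous one, or DateTime.MaxValue once the stream is exhausted. Purely as an illustration of that contract (in practice the Generator base class performs this scheduling itself), a driver inside the same class would look like:

        // Illustration of the GenerateNext contract only; the Generator base class drives these calls in practice.
        DateTime next = this.start;               // e.g. the capture start time
        while (next != DateTime.MaxValue)         // DateTime.MaxValue signals that no more data is available
        {
            next = this.GenerateNext(next);       // each call must return a strictly increasing time
        }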
Example #3
        private void PrepareSample(SourceReaderFlags flags, long timestamp, Sample sample)
        {
            if (flags.HasFlag(SourceReaderFlags.StreamTick))
            {
                firstTimestamp = timestamp;
            }

            if (sample != null)
            {
                //Console.WriteLine("time " + time + " Timestamp " + timestamp + " Flags " + flags);
                var sampleDuration  = timestamp - prevTimestamp;
                var sampleTimestamp = timestamp - firstTimestamp;

                sample.SampleTime     = sampleTimestamp;
                sample.SampleDuration = sampleDuration;

                ProcessSample(sample);
            }
            prevTimestamp = timestamp;
        }
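For intuition, with a nominal 30 fps source the reader reports timestamps in 100-nanosecond units roughly 333,333 ticks apart, so the rebasing above works out as in this small worked example (illustrative values only):

        // Worked illustration of the rebasing in PrepareSample (values in 100-ns units, ~30 fps).
        long firstTimestamp = 10_000_000;                 // reported with the StreamTick flag (null sample)
        long prevTimestamp  = 10_000_000;
        long timestamp      = 10_333_333;                 // first real sample
        long sampleTime     = timestamp - firstTimestamp; // 333_333
        long sampleDuration = timestamp - prevTimestamp;  // 333_333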
Example #4
        private void samplerProc()
        {
            try
            {
                long currentSampleTime = 0;
                int  streamRef         = 0;
                SourceReaderFlags sourceReaderFlags = 0;

                int fpsStoreLen = 25;
                var fpsStore    = new Queue <long>();

                Stopwatch sw             = new Stopwatch();
                long      lastSampleTime = -1;
                bool      justUnfreezed  = false;
                double    fps            = double.NaN;

                while (!cancelTokenSource.Token.IsCancellationRequested)
                {
                    bool gonnaFreeze        = WantToFreezeFlag;
                    bool skipBufferedFrames = gonnaFreeze || justUnfreezed;

                    lock (startLocker)
                        if (WantToConfigureReader || reader == null || reader.IsDisposed)
                        {
                            newSourceReader();
                            WantToConfigureReader = false;
                            WantToChangeMediaType = true;
                            fpsStore.Clear();
                        }

                    if (WantToChangeMediaType)
                    {
                        setMediaType();
                        WantToChangeMediaType = false;
                        fpsStore.Clear();
                    }

                    if (skipBufferedFrames)
                    {
                        sw.Restart();
                    }
                    Sample sample = reader.ReadSample(SourceReaderIndex.FirstVideoStream, 0, out streamRef, out sourceReaderFlags, out currentSampleTime);
                    if (skipBufferedFrames)
                    {
                        sw.Stop();
                    }

                    if (sample == null && sourceReaderFlags.HasFlag(SourceReaderFlags.StreamTick))
                    {
                        continue;
                    }

                    if (sourceReaderFlags != SourceReaderFlags.None)
                    {
                        if (sample != null)
                        {
                            sample.Dispose();
                        }
                        throw new Exception("Something went really wrong");
                    }

                    if (skipBufferedFrames)
                    {
                        if (sw.ElapsedMilliseconds <= 5)
                        {
                            SharpDX.Utilities.Dispose(ref sample);
                            lastSampleTime = currentSampleTime;
                            continue;
                        }
                    }

                    justUnfreezed = false;

                    if (lastSampleTime != -1)
                    {
                        fpsStore.Enqueue(currentSampleTime - lastSampleTime);
                        if (fpsStore.Count() > fpsStoreLen)
                        {
                            fpsStore.Dequeue();
                        }
                        fps = 10000000.0 / fpsStore.Average();
                    }
                    lastSampleTime = currentSampleTime;

                    if (cancelTokenSource.Token.IsCancellationRequested)
                    {
                        break;
                    }

                    lock (sampleLocker)
                    {
                        if (lastSample == null || lastSample.TotalLength != sample.TotalLength)
                        {
                            var tp    = reader.GetCurrentMediaType(SourceReaderIndex.FirstVideoStream);
                            var fsize = tp.Get(MediaTypeAttributeKeys.FrameSize);
                            tp.Dispose();
                            // FrameSize packs width in the upper 32 bits and height in the lower 32 bits
                            long w = fsize >> 32;
                            long h = fsize & 0xFFFFFFFF;

                            lastSampleFrameH = (int)h;
                            lastSampleFrameW = (int)w;
                        }

                        SharpDX.Utilities.Dispose(ref lastSample);
                        lastSample = sample;
                    }

                    OnNewFrame?.Invoke(this, new NewFrameEventArgs(gonnaFreeze ? double.NaN : Math.Round(fps, 1)));

                    if (!gonnaFreeze)
                    {
                        continue;
                    }

                    WantToFreezeFlag = false;
                    freezedEvent.Set();
                    kickstartEvent.Reset();
                    kickstartEvent.Wait();
                    justUnfreezed = true;
                    fpsStore.Clear();
                }
            }
            catch (SharpDXException ex)
            {
                if ((ex.ResultCode == SharpDX.MediaFoundation.ResultCode.TopoCodecNotFound ||
                     ex.ResultCode == SharpDX.MediaFoundation.ResultCode.InvalidMediaType))
                {
                    String msg = $"Incompatible MediaType format for the current Source Reader configuration.\nSampler stopped\n\n{ex.Message}";
                    MessageBox.Show(msg, "Oops");
                }
                if (ex.ResultCode == SharpDX.MediaFoundation.ResultCode.VideoRecordingDeviceInvalidated)
                {
                    SharpDX.Utilities.Dispose(ref reader);
                    if (settingsForm != null)
                    {
                        settingsForm.BeginInvoke(new Action(() => settingsForm.Close()));
                    }
                    WantToConfigureReader = true;   //  Recreate the device & reader on possible new connection attempt
                }
                throw;
            }
        }
Example #5
 /// <summary>
 /// <p><strong>Applies to: </strong>desktop apps | Metro style apps</p><p>Reads the next sample from the media source.</p>
 /// </summary>
 /// <param name="dwStreamIndex"><dd> <p>The stream to pull data from. The value can be any of the following.</p> <table> <tr><th>Value</th><th>Meaning</th></tr> <tr><td> <dl> <dt>0?0xFFFFFFFB</dt> </dl> </td><td> <p>The zero-based index of a stream.</p> </td></tr> <tr><td><dl> <dt><strong><strong><see cref="SharpDX.MediaFoundation.SourceReaderIndex.FirstVideoStream"/></strong></strong></dt> <dt>0xFFFFFFFC</dt> </dl> </td><td> <p>The first video stream.</p> </td></tr> <tr><td><dl> <dt><strong><strong><see cref="SharpDX.MediaFoundation.SourceReaderIndex.FirstAudioStream"/></strong></strong></dt> <dt>0xFFFFFFFD</dt> </dl> </td><td> <p>The first audio stream.</p> </td></tr> <tr><td><dl> <dt><strong><strong><see cref="SharpDX.MediaFoundation.SourceReaderIndex.AnyStream"/></strong></strong></dt> <dt>0xFFFFFFFE</dt> </dl> </td><td> <p>Get the next available sample, regardless of which stream.</p> </td></tr> </table> <p>?</p> </dd></param>
 /// <param name="dwControlFlags"><dd> <p>A bitwise <strong>OR</strong> of zero or more flags from the <strong><see cref="SharpDX.MediaFoundation.SourceReaderControlFlags"/></strong> enumeration.</p> </dd></param>
 /// <param name="dwActualStreamIndexRef"><dd> <p>Receives the zero-based index of the stream.</p> </dd></param>
 /// <param name="dwStreamFlagsRef"><dd> <p>Receives a bitwise <strong>OR</strong> of zero or more flags from the <strong><see cref="SharpDX.MediaFoundation.SourceReaderFlags"/></strong> enumeration.</p> </dd></param>
 /// <param name="llTimestampRef"><dd> <p>Receives the time stamp of the sample, or the time of the stream event indicated in <em>pdwStreamFlags</em>. The time is given in 100-nanosecond units.</p> </dd></param>
 /// <returns><dd> <p>Receives a reference to the <strong><see cref="SharpDX.MediaFoundation.Sample"/></strong> interface or the value <strong><c>null</c></strong> (see Remarks). If this parameter receives a non-<strong><c>null</c></strong> reference, the caller must release the interface.</p> </dd></returns>
 /// <remarks>
 /// <p>If the requested stream is not selected, the return code is <strong>MF_E_INVALIDREQUEST</strong>. See <strong><see cref="SharpDX.MediaFoundation.SourceReader.SetStreamSelection"/></strong>.</p><p> This method can complete synchronously or asynchronously. If you provide a callback reference when you create the source reader, the method is asynchronous. Otherwise, the method is synchronous. For more information about setting the callback reference, see <see cref="SharpDX.MediaFoundation.SourceReaderAttributeKeys.AsyncCallback"/>.</p>Asynchronous Mode<p>In asynchronous mode:</p><ul> <li>All of the <code>[out]</code> parameters must be <strong><c>null</c></strong>. Otherwise, the method returns <strong>E_INVALIDARG</strong>.</li> <li>The method returns immediately.</li> <li>When the operation completes, the application's <strong><see cref="SharpDX.MediaFoundation.SourceReaderCallback.OnReadSample"/></strong> method is called.</li> <li>If an error occurs, the method can fail either synchronously or asynchronously. Check the return value of <strong>ReadSample</strong>, and also check the <em>hrStatus</em> parameter of <strong><see cref="SharpDX.MediaFoundation.SourceReaderCallback.OnReadSample"/></strong>.</li> </ul>Synchronous Mode<p>In synchronous mode:</p><ul> <li>The <em>pdwStreamFlags</em> and <em>ppSample</em> parameters cannot be <strong><c>null</c></strong>. Otherwise, the method returns <strong>E_POINTER</strong>.</li> <li>The <em>pdwActualStreamIndex</em> and <em>pllTimestamp</em> parameters can be <strong><c>null</c></strong>.</li> <li>The method blocks until the next sample is available.</li> </ul><p> In synchronous mode, if the <em>dwStreamIndex</em> parameter is <strong><see cref="SharpDX.MediaFoundation.SourceReaderIndex.AnyStream"/></strong>, you should pass a non-<strong><c>null</c></strong> value for <em>pdwActualStreamIndex</em>, so that you know which stream delivered the sample.</p><p>This method can return flags in the <em>pdwStreamFlags</em> parameter without returning a media sample in <em>ppSample</em>. Therefore, the <em>ppSample</em> parameter can receive a <strong><c>null</c></strong> reference even when the method succeeds. For example, when the source reader reaches the end of the stream, it returns the <strong><see cref="SharpDX.MediaFoundation.SourceReaderFlags.FEndofstream"/></strong> flag in <em>pdwStreamFlags</em> and sets <em>ppSample</em> to <strong><c>null</c></strong>.</p><p>If there is a gap in the stream, <em>pdwStreamFlags</em> receives the <see cref="SharpDX.MediaFoundation.SourceReaderFlags.FStreamtick"/> flag, <em>ppSample</em> is <strong><c>null</c></strong>, and <em>pllTimestamp</em> indicates the time when the gap occurred. </p><p>This interface is available on Windows Vista if Platform Update Supplement for Windows Vista is installed.</p>
 /// </remarks>
 /// <msdn-id>dd374665</msdn-id>
 /// <unmanaged>HRESULT IMFSourceReader::ReadSample([In] unsigned int dwStreamIndex,[In] unsigned int dwControlFlags,[Out, Optional] unsigned int* pdwActualStreamIndex,[Out, Optional] unsigned int* pdwStreamFlags,[Out, Optional] longlong* pllTimestamp,[Out, Optional] IMFSample** ppSample)</unmanaged>
 /// <unmanaged-short>IMFSourceReader::ReadSample</unmanaged-short>
 public SharpDX.MediaFoundation.Sample ReadSample(SourceReaderIndex dwStreamIndex, SourceReaderControlFlags dwControlFlags, out int dwActualStreamIndexRef, out SourceReaderFlags dwStreamFlagsRef, out long llTimestampRef)
 {
     return(ReadSample((int)dwStreamIndex, dwControlFlags, out dwActualStreamIndexRef, out dwStreamFlagsRef, out llTimestampRef));
 }
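The remarks above describe two modes: synchronous (no callback configured; the out parameters receive the results and the call blocks) and asynchronous (a callback is set via SourceReaderAttributeKeys.AsyncCallback and results arrive in OnReadSample). A minimal synchronous read loop over this wrapper could look like the following sketch, which assumes a reader created without an async callback:

    // Sketch of a synchronous read loop (assumes the source reader was created without an async callback).
    SourceReaderFlags streamFlags;
    do
    {
        Sample sample = sourceReader.ReadSample(
            SourceReaderIndex.FirstVideoStream,
            SourceReaderControlFlags.None,
            out int actualStreamIndex,
            out streamFlags,
            out long timestamp);

        if (sample != null)
        {
            // ... consume the sample; timestamp is in 100-nanosecond units ...
            sample.Dispose();   // the caller must release every returned sample
        }
    }
    while (!streamFlags.HasFlag(SourceReaderFlags.Endofstream));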
Example #6
 int IMFSourceReaderCallback.OnReadSample(SharpDX.Result hrStatus, int dwStreamIndex, SourceReaderFlags dwStreamFlags, long llTimestamp, IntPtr pSample)
 {
     return(OnReadSample?.Invoke(hrStatus, dwStreamIndex, dwStreamFlags, llTimestamp, pSample) ?? 0);
     //return (int)HResult.S_OK;
 }
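This thin wrapper forwards the native callback to an OnReadSample delegate and returns S_OK when no handler is attached. A handler might only inspect the status and flags and signal the consumer, along the lines of the sketch below; the externally assignable delegate and the endOfStreamEvent are assumptions, not part of the example:

    // Sketch of a handler for the OnReadSample delegate above; everything except the
    // callback parameters is an assumption for illustration.
    callback.OnReadSample += (hrStatus, streamIndex, streamFlags, timestamp, pSample) =>
    {
        if (hrStatus.Failure)
        {
            return hrStatus.Code;                 // propagate the failing HRESULT
        }

        if (streamFlags.HasFlag(SourceReaderFlags.Endofstream))
        {
            endOfStreamEvent.Set();               // assumed ManualResetEventSlim owned by the consumer
        }

        // pSample is the raw IMFSample pointer; it can be IntPtr.Zero when only flags are
        // reported, and any non-zero pointer must be released by whoever consumes it.
        return 0;                                 // S_OK
    };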
Example #7
        private int SourceReaderCallback_OnReadSample(SharpDX.Result result, int index, SourceReaderFlags flags, long timestamp, IntPtr pSample)
        {
            // logger.Debug(timestamp + " " + " " + flags + " " + result);
            if (result.Failure)
            {
                //...
                logger.Error("SourceReaderCallback_OnReadSample(...) " + result);


                //state = CaptureState.Stopping;
            }

            if (State != CaptureState.Capturing)
            {
                logger.Warn("Invalid capture state: " + State);

                return(0);
            }

            if (flags != SourceReaderFlags.None)
            {
                logger.Debug(timestamp + " " + " " + flags + " " + result);
            }

            PrepareSample(flags, timestamp, (Sample)pSample);

            syncEvent.Set();

            return(0);
        }
        public void Start()
        {
            logger.Debug("VideoCaptureSource::Start()");
            running = true;

            Task.Run(() =>
            {
                processor.Start();

                int sampleCount = 0;

                try
                {
                    while (running)
                    {
                        int actualIndex         = 0;
                        SourceReaderFlags flags = SourceReaderFlags.None;
                        long timestamp          = 0;
                        var sample = sourceReader.ReadSample(SourceReaderIndex.FirstVideoStream, SourceReaderControlFlags.None, out actualIndex, out flags, out timestamp);

                        try
                        {
                            //Console.WriteLine("#" + sampleCount + " Timestamp " + timestamp + " Flags " + flags);

                            if (sample != null)
                            {
                                //Console.WriteLine("SampleTime " + sample.SampleTime + " SampleDuration " + sample.SampleDuration + " SampleFlags " + sample.SampleFlags);
                                Sample outputSample = null;
                                try
                                {
                                    var res = processor.ProcessSample(sample, out outputSample);

                                    if (res)
                                    {
                                        //Console.WriteLine("outputSample!=null" + (outputSample != null));

                                        var mediaBuffer = outputSample.ConvertToContiguousBuffer();
                                        var ptr         = mediaBuffer.Lock(out int cbMaxLengthRef, out int cbCurrentLengthRef);

                                        //var width = outProcArgs.Width;
                                        //var height = outProcArgs.Height;

                                        var dataBox = device.ImmediateContext.MapSubresource(texture, 0, MapMode.Read, MapFlags.None);

                                        Kernel32.CopyMemory(dataBox.DataPointer, ptr, (uint)cbCurrentLengthRef);

                                        device.ImmediateContext.UnmapSubresource(texture, 0);


                                        device.ImmediateContext.CopyResource(texture, SharedTexture);
                                        device.ImmediateContext.Flush();

                                        OnBufferUpdated();

                                        //GDI.Bitmap bmp = new GDI.Bitmap(width, height, GDI.Imaging.PixelFormat.Format32bppArgb);

                                        //DxTool.TextureToBitmap(texture, bmp);

                                        ////var bmpData = bmp.LockBits(new GDI.Rectangle(0, 0, width, height), GDI.Imaging.ImageLockMode.WriteOnly, bmp.PixelFormat);
                                        ////uint size = (uint)(bmpData.Stride * height);
                                        ////Kernel32.CopyMemory(bmpData.Scan0, ptr, size);
                                        ////bmp.UnlockBits(bmpData);

                                        ////var fileName = @"d:\BMP\" + "#" + sampleCount + "_" + timestamp + ".bmp";
                                        ////bmp.Save(fileName, GDI.Imaging.ImageFormat.Bmp);

                                        //bmp.Dispose();

                                        mediaBuffer.Unlock();
                                        mediaBuffer?.Dispose();
                                    }
                                }
                                catch (Exception ex)
                                {
                                    logger.Error(ex);
                                }
                                finally
                                {
                                    if (outputSample != null)
                                    {
                                        outputSample.Dispose();
                                    }
                                }
                            }
                        }
                        finally
                        {
                            sample?.Dispose();
                        }

                        sampleCount++;
                    }
                }
                catch (Exception ex)
                {
                    logger.Error(ex);
                }
                finally
                {
                    if (processor != null)
                    {
                        processor.Stop();
                    }
                }
            });
        }
Example #9
        /// <summary>
        /// GenerateNext is called by the Generator base class when the next sample should be read.
        /// </summary>
        /// <param name="currentTime">The originating time that triggered the current call.</param>
        /// <returns>The originating time at which to capture the next sample.</returns>
        protected override DateTime GenerateNext(DateTime currentTime)
        {
            DateTime          originatingTime = default(DateTime);
            int               streamIndex     = 0;
            SourceReaderFlags flags           = SourceReaderFlags.None;
            long              timestamp       = 0;
            Sample            sample          = this.sourceReader.ReadSample(SourceReaderIndex.AnyStream, 0, out streamIndex, out flags, out timestamp);

            if (sample != null)
            {
                originatingTime = this.start + TimeSpan.FromTicks(timestamp);
                MediaBuffer buffer           = sample.ConvertToContiguousBuffer();
                int         currentByteCount = 0;
                int         maxByteCount     = 0;
                IntPtr      data             = buffer.Lock(out maxByteCount, out currentByteCount);

                if (streamIndex == this.imageStreamIndex)
                {
                    // Detect out of order originating times
                    if (originatingTime > this.lastPostedImageTime)
                    {
                        using (var sharedImage = ImagePool.GetOrCreate(this.videoWidth, this.videoHeight, Imaging.PixelFormat.BGR_24bpp))
                        {
                            sharedImage.Resource.CopyFrom(data);
                            this.Image.Post(sharedImage, originatingTime);
                            this.lastPostedImageTime = originatingTime;
                        }
                    }
                    else if (!this.dropOutOfOrderPackets)
                    {
                        throw new InvalidOperationException(
                                  $"The most recently captured image frame has a timestamp ({originatingTime.TimeOfDay}) which is before " +
                                  $"that of the last posted image frame ({this.lastPostedImageTime.TimeOfDay}), as reported by the video stream. This could " +
                                  $"be due to a timing glitch in the video stream. Set the 'dropOutOfOrderPackets' " +
                                  $"parameter to true to handle this condition by dropping " +
                                  $"packets with out of order timestamps.");
                    }
                }
                else if (streamIndex == this.audioStreamIndex)
                {
                    // Detect out of order originating times
                    if (originatingTime > this.lastPostedAudioTime)
                    {
                        AudioBuffer audioBuffer = new AudioBuffer(currentByteCount, this.waveFormat);
                        Marshal.Copy(data, audioBuffer.Data, 0, currentByteCount);
                        this.Audio.Post(audioBuffer, originatingTime);
                        this.lastPostedAudioTime = originatingTime;
                    }
                    else if (!this.dropOutOfOrderPackets)
                    {
                        throw new InvalidOperationException(
                                  $"The most recently captured audio buffer has a timestamp ({originatingTime.TimeOfDay}) which is before " +
                                  $"that of the last posted audio buffer ({this.lastPostedAudioTime.TimeOfDay}), as reported by the audio stream. This could " +
                                  $"be due to a timing glitch in the audio stream. Set the 'dropOutOfOrderPackets' " +
                                  $"parameter to true to handle this condition by dropping " +
                                  $"packets with out of order timestamps.");
                    }
                }

                buffer.Unlock();
                buffer.Dispose();
                sample.Dispose();
            }

            if (flags == SourceReaderFlags.Endofstream)
            {
                return(DateTime.MaxValue); // Used to indicate there is no more data
            }

            return(originatingTime);
        }