Example #1
        private void SetupSampleBuffer(MfVideoArgs args)
        {
            logger.Debug("SetupSampleBuffer(...)");

            int width  = args.Width;
            int height = args.Height;

            //if (width % 2 != 0)
            //{// must be even...
            //    width++;
            //}

            //if (height % 2 != 0)
            //{
            //    height++;
            //}

            Format format = MfTool.GetDXGIFormatFromVideoFormatGuid(args.Format);

            if (format == Format.Unknown)
            {
                throw new NotSupportedException("Format not suppored " + args.Format);
            }

            var _descr = new Texture2DDescription
            {
                Format            = format,
                Width             = width,
                Height            = height,
                MipLevels         = 1,
                ArraySize         = 1,
                SampleDescription = { Count = 1 },
            };

            bufTexture = new Texture2D(device, _descr);

            MediaBuffer mediaBuffer = null;

            try
            {
                MediaFactory.CreateDXGISurfaceBuffer(typeof(Texture2D).GUID, bufTexture, 0, false, out mediaBuffer);
                bufSample = MediaFactory.CreateSample();
                bufSample.AddBuffer(mediaBuffer);
            }
            finally
            {
                mediaBuffer?.Dispose();
            }
        }
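
The sample added via AddBuffer keeps its own COM reference to the DXGI buffer, which is why the local mediaBuffer wrapper can be released in the finally block while bufSample continues to hold the data. A matching teardown path is not shown above; a minimal sketch, assuming bufSample and bufTexture are the only long-lived resources created here (the method name is hypothetical):

        private void CleanupSampleBuffer()
        {
            // Release the Media Foundation sample first, then the backing texture.
            bufSample?.Dispose();
            bufSample = null;

            bufTexture?.Dispose();
            bufTexture = null;
        }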
Example #2
        /// <summary>
        /// GenerateNext is called by the Generator base class when the next sample should be read.
        /// </summary>
        /// <param name="currentTime">The originating time that triggered the current call.</param>
        /// <returns>The originating time at which to capture the next sample.</returns>
        protected override DateTime GenerateNext(DateTime currentTime)
        {
            DateTime          originatingTime = default(DateTime);
            int               streamIndex     = 0;
            SourceReaderFlags flags           = SourceReaderFlags.None;
            long              timestamp       = 0;
            Sample            sample          = this.sourceReader.ReadSample(SourceReaderIndex.AnyStream, 0, out streamIndex, out flags, out timestamp);

            if (sample != null)
            {
                originatingTime = this.start + TimeSpan.FromTicks(timestamp);
                MediaBuffer buffer           = sample.ConvertToContiguousBuffer();
                int         currentByteCount = 0;
                int         maxByteCount     = 0;
                IntPtr      data             = buffer.Lock(out maxByteCount, out currentByteCount);

                if (streamIndex == this.imageStreamIndex)
                {
                    using (var sharedImage = ImagePool.GetOrCreate(this.videoWidth, this.videoHeight, Imaging.PixelFormat.BGR_24bpp))
                    {
                        sharedImage.Resource.CopyFrom(data);
                        this.Image.Post(sharedImage, originatingTime);
                    }
                }
                else if (streamIndex == this.audioStreamIndex)
                {
                    AudioBuffer audioBuffer = new AudioBuffer(currentByteCount, this.waveFormat);
                    Marshal.Copy(data, audioBuffer.Data, 0, currentByteCount);
                    this.Audio.Post(audioBuffer, originatingTime);
                }

                buffer.Unlock();
                buffer.Dispose();
                sample.Dispose();
            }

            if (flags == SourceReaderFlags.Endofstream)
            {
                return(DateTime.MaxValue); // Used to indicate there is no more data
            }

            return(originatingTime);
        }
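
ReadSample reports the timestamp in 100-nanosecond units, the same resolution as TimeSpan ticks, which is why TimeSpan.FromTicks(timestamp) can be added to the start time directly. Note also that SourceReaderFlags is a bit mask; the equality check above works when Endofstream arrives on its own, but a more defensive variant (a sketch, not part of the original sample) would test the flag explicitly:

            if (flags.HasFlag(SourceReaderFlags.Endofstream))
            {
                return DateTime.MaxValue; // No more data to read
            }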
Example #3
        /// <summary>
        ///     Reads a resampled sequence of bytes from the <see cref="DmoResampler" /> and advances the position within the
        ///     stream by the
        ///     number of bytes read.
        /// </summary>
        /// <param name="buffer">
        ///     An array of bytes. When this method returns, the <paramref name="buffer" /> contains the specified
        ///     byte array with the values between <paramref name="offset" /> and (<paramref name="offset" /> +
        ///     <paramref name="count" /> - 1) replaced by the bytes read from the current source.
        /// </param>
        /// <param name="offset">
        ///     The zero-based byte offset in the <paramref name="buffer" /> at which to begin storing the data
        ///     read from the current stream.
        /// </param>
        /// <param name="count">The maximum number of bytes to read from the current source.</param>
        /// <returns>The total number of bytes read into the buffer.</returns>
        public override int Read(byte[] buffer, int offset, int count)
        {
            lock (LockObj)
            {
                int read = 0;
                while (read < count)
                {
                    MediaObject mediaObject = Resampler.MediaObject;
                    if (mediaObject.IsReadyForInput(0))
                    {
                        var bytesToRead = (int)OutputToInput(count - read);
                        _readBuffer = _readBuffer.CheckBuffer(bytesToRead);
                        int bytesRead = base.Read(_readBuffer, 0, bytesToRead);
                        if (bytesRead <= 0)
                        {
                            break;
                        }

                        if (_disposed)
                        {
                            break;
                        }

                        if (InputBuffer.MaxLength < bytesRead)
                        {
                            InputBuffer.Dispose();
                            InputBuffer = new MediaBuffer(bytesRead);
                        }
                        InputBuffer.Write(_readBuffer, 0, bytesRead);

                        mediaObject.ProcessInput(0, InputBuffer);

                        OutputBuffer.Reset();
                        do
                        {
                            var outputBuffer = (MediaBuffer)OutputBuffer.Buffer;
                            if (outputBuffer.MaxLength < count)
                            {
                                outputBuffer.Dispose();
                                OutputBuffer.Buffer = new MediaBuffer(count);
                            }
                            OutputBuffer.Buffer.SetLength(0);

                            mediaObject.ProcessOutput(ProcessOutputFlags.None, new[] { OutputBuffer }, 1);

                            if (OutputBuffer.Length <= 0)
                            {
                                Debug.WriteLine("DmoResampler::Read: No data in output buffer.");
                                break;
                            }

                            OutputBuffer.Read(buffer, offset + read);
                            read += OutputBuffer.Length;
                        } while (/*_outputBuffer.DataAvailable*/ false); //todo: Implement DataAvailable
                    }
                    else
                    {
                        Debug.WriteLine("Case of not ready for input is not implemented yet."); //todo: .
                    }
                }

                return(read);
            }
        }
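
The OutputToInput helper used above is not shown. A plausible sketch, assuming the resampler exposes its source and destination formats (the InputFormat/OutputFormat property names here are hypothetical) and that the conversion simply scales the requested output byte count by the ratio of byte rates, rounded down to whole blocks:

        // Hypothetical helper; the real implementation may differ.
        private long OutputToInput(long outputByteCount)
        {
            long bytes = (long)(outputByteCount * (double)InputFormat.BytesPerSecond / OutputFormat.BytesPerSecond);
            return bytes - (bytes % InputFormat.BlockAlign); // align to whole sample frames
        }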
Example #4
        public void Encode(Texture2D texture)
        {
            // var device = encoder?.device;

            if (device != null)
            {
                using (var sharedRes = texture.QueryInterface<SharpDX.DXGI.Resource>())
                {
                    using (var sharedTexture = device.OpenSharedResource<Texture2D>(sharedRes.SharedHandle))
                    {
                        device.ImmediateContext.CopyResource(sharedTexture, bufTexture);
                    }
                }
            }

            Sample inputSample = null;

            try
            {
                MediaBuffer mediaBuffer = null;
                try
                {
                    MediaFactory.CreateDXGISurfaceBuffer(IID.D3D11Texture2D, bufTexture, 0, false, out mediaBuffer);
                    inputSample = MediaFactory.CreateSample();
                    inputSample.AddBuffer(mediaBuffer);

                    inputSample.SampleTime     = 0;
                    inputSample.SampleDuration = 0;
                }
                finally
                {
                    mediaBuffer?.Dispose();
                }

                if (processor != null)
                {
                    Sample processedSample = null;
                    try
                    {
                        bool result = processor.ProcessSample(inputSample, out processedSample);
                        if (result)
                        {
                            encoder.ProcessSample(processedSample);
                            //EncodeSample(processedSample);
                        }
                    }
                    finally
                    {
                        processedSample?.Dispose();
                    }
                }
                else
                {
                    encoder.ProcessSample(inputSample);
                    //EncodeSample(inputSample);
                }
            }
            finally
            {
                inputSample?.Dispose();
            }
        }
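
SampleTime and SampleDuration are left at zero here; a sink or encoder that relies on timestamps expects both in 100-nanosecond units. A minimal sketch for a fixed frame rate (frameIndex and frameRate are hypothetical caller-supplied values, and the helper name is not from the original sample):

        private static void SetSampleTiming(Sample sample, long frameIndex, int frameRate)
        {
            long frameDuration = 10000000L / frameRate; // 10,000,000 100-ns ticks per second
            sample.SampleTime     = frameIndex * frameDuration;
            sample.SampleDuration = frameDuration;
        }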
Example #5
        /// <summary>
        /// GenerateNext is called by the Generator base class when the next sample should be read.
        /// </summary>
        /// <param name="currentTime">The originating time that triggered the current call.</param>
        /// <returns>The originating time at which to capture the next sample.</returns>
        protected override DateTime GenerateNext(DateTime currentTime)
        {
            DateTime          originatingTime = default(DateTime);
            int               streamIndex     = 0;
            SourceReaderFlags flags           = SourceReaderFlags.None;
            long              timestamp       = 0;
            Sample            sample          = this.sourceReader.ReadSample(SourceReaderIndex.AnyStream, 0, out streamIndex, out flags, out timestamp);

            if (sample != null)
            {
                originatingTime = this.start + TimeSpan.FromTicks(timestamp);
                MediaBuffer buffer           = sample.ConvertToContiguousBuffer();
                int         currentByteCount = 0;
                int         maxByteCount     = 0;
                IntPtr      data             = buffer.Lock(out maxByteCount, out currentByteCount);

                if (streamIndex == this.imageStreamIndex)
                {
                    // Detect out of order originating times
                    if (originatingTime > this.lastPostedImageTime)
                    {
                        using (var sharedImage = ImagePool.GetOrCreate(this.videoWidth, this.videoHeight, Imaging.PixelFormat.BGR_24bpp))
                        {
                            sharedImage.Resource.CopyFrom(data);
                            this.Image.Post(sharedImage, originatingTime);
                            this.lastPostedImageTime = originatingTime;
                        }
                    }
                    else if (!this.dropOutOfOrderPackets)
                    {
                        throw new InvalidOperationException(
                                  $"The most recently captured image frame has a timestamp ({originatingTime.TimeOfDay}) which is before " +
                                  $"that of the last posted image frame ({this.lastPostedImageTime.TimeOfDay}), as reported by the video stream. This could " +
                                  $"be due to a timing glitch in the video stream. Set the 'dropOutOfOrderPackets' " +
                                  $"parameter to true to handle this condition by dropping " +
                                  $"packets with out of order timestamps.");
                    }
                }
                else if (streamIndex == this.audioStreamIndex)
                {
                    // Detect out of order originating times
                    if (originatingTime > this.lastPostedAudioTime)
                    {
                        AudioBuffer audioBuffer = new AudioBuffer(currentByteCount, this.waveFormat);
                        Marshal.Copy(data, audioBuffer.Data, 0, currentByteCount);
                        this.Audio.Post(audioBuffer, originatingTime);
                        this.lastPostedAudioTime = originatingTime;
                    }
                    else if (!this.dropOutOfOrderPackets)
                    {
                        throw new InvalidOperationException(
                                  $"The most recently captured audio buffer has a timestamp ({originatingTime.TimeOfDay}) which is before " +
                                  $"that of the last posted audio buffer ({this.lastPostedAudioTime.TimeOfDay}), as reported by the audio stream. This could " +
                                  $"be due to a timing glitch in the audio stream. Set the 'dropOutOfOrderPackets' " +
                                  $"parameter to true to handle this condition by dropping " +
                                  $"packets with out of order timestamps.");
                    }
                }

                buffer.Unlock();
                buffer.Dispose();
                sample.Dispose();
            }

            if (flags == SourceReaderFlags.Endofstream)
            {
                return(DateTime.MaxValue); // Used to indicate there is no more data
            }

            return(originatingTime);
        }
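
For the ordering check to accept the very first image and audio samples, the "last posted" trackers have to start below any valid originating time. A sketch of the corresponding field initialization (an assumption; the declarations are not shown in the example):

        private DateTime lastPostedImageTime = DateTime.MinValue;
        private DateTime lastPostedAudioTime = DateTime.MinValue;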