/// <summary>
/// Draws the current wall-clock time ("HH:mm:ss.fff") onto <paramref name="bmp"/> and copies
/// the bitmap's raw pixel data into the Media Foundation buffer <paramref name="mb"/>.
/// </summary>
/// <param name="bmp">Destination bitmap to stamp; its full pixel block is then copied out.</param>
/// <param name="mb">Media buffer that receives the pixels; its CurrentLength is set to stride * height.</param>
public void UpdateSample(Bitmap bmp, MediaBuffer mb)
{
    var text = DateTime.Now.ToString("HH:mm:ss.fff");

    // Graphics AND Font are IDisposable; the original leaked the Font on every call.
    using (var g = Graphics.FromImage(bmp))
    using (var font = new System.Drawing.Font(FontFamily.GenericMonospace, 120))
    {
        var textSize = g.MeasureString(text, font);
        g.FillRectangle(Brushes.Black, 0f, 0f, textSize.Width, textSize.Height);
        g.DrawString(text, font, Brushes.Yellow, 0f, 0f);
    }

    var data = bmp.LockBits(
        new Rectangle(0, 0, bmp.Width, bmp.Height),
        System.Drawing.Imaging.ImageLockMode.ReadOnly,
        bmp.PixelFormat);
    try
    {
        var size = data.Stride * data.Height;

        // Lock outside the inner try so a failed Lock is never followed by an Unlock.
        var pBuffer = mb.Lock(out int cbMaxLen, out int cbCurLen);
        try
        {
            Kernel32.CopyMemory(pBuffer, data.Scan0, (uint)size);
            mb.CurrentLength = size;
        }
        finally
        {
            // Always release the MF buffer, even if the copy throws.
            mb.Unlock();
        }
    }
    finally
    {
        // Always release the GDI+ bits, even if locking/copying the MF buffer throws.
        bmp.UnlockBits(data);
    }
}
/// <summary>
/// Releases the lock previously taken on the underlying media buffer
/// (2D variant when present, plain buffer otherwise) and clears the mapped pointer.
/// </summary>
private void Unlock()
{
    // A 2D buffer must be released with the matching Unlock2D call.
    if (buffer2D is null)
    {
        buffer.Unlock();
    }
    else
    {
        buffer2D.Unlock2D();
    }

    Data = IntPtr.Zero;
}
/// <summary>
/// GenerateNext is called by the Generator base class when the next sample should be read.
/// Reads one sample from the source reader and posts it to the image or audio stream.
/// </summary>
/// <param name="previous">Time of previous sample.</param>
/// <returns>Time for current sample; <see cref="DateTime.MaxValue"/> once the stream has ended.</returns>
protected override DateTime GenerateNext(DateTime previous)
{
    DateTime originatingTime = default(DateTime);
    int streamIndex = 0;
    SourceReaderFlags flags = SourceReaderFlags.None;
    long timestamp = 0;
    Sample sample = this.sourceReader.ReadSample(SourceReaderIndex.AnyStream, 0, out streamIndex, out flags, out timestamp);
    if (sample != null)
    {
        originatingTime = this.start + TimeSpan.FromTicks(timestamp);
        MediaBuffer buffer = sample.ConvertToContiguousBuffer();
        try
        {
            IntPtr data = buffer.Lock(out int maxByteCount, out int currentByteCount);
            try
            {
                if (streamIndex == this.imageStreamIndex)
                {
                    using (var sharedImage = ImagePool.GetOrCreate(this.videoWidth, this.videoHeight, Imaging.PixelFormat.BGR_24bpp))
                    {
                        sharedImage.Resource.CopyFrom(data);
                        this.Image.Post(sharedImage, originatingTime);
                    }
                }
                else if (streamIndex == this.audioStreamIndex)
                {
                    AudioBuffer audioBuffer = new AudioBuffer(currentByteCount, this.waveFormat);
                    Marshal.Copy(data, audioBuffer.Data, 0, currentByteCount);
                    this.Audio.Post(audioBuffer, originatingTime);
                }
            }
            finally
            {
                // Always unlock, even if posting throws.
                buffer.Unlock();
            }
        }
        finally
        {
            // COM wrappers must be released even on failure.
            buffer.Dispose();
            sample.Dispose();
        }
    }

    // SourceReaderFlags is a bitmask; test the end-of-stream bit rather than
    // comparing the whole value for equality (other bits may be set too).
    if ((flags & SourceReaderFlags.Endofstream) != 0)
    {
        return DateTime.MaxValue; // Used to indicate there is no more data.
    }

    if (originatingTime <= previous)
    {
        return previous + TimeSpan.FromTicks(1); // To enforce strictly increasing times for the generator.
    }

    return originatingTime;
}
/// <summary>
/// Creates a Bgr32 <see cref="BitmapSource"/> from the pixel data held in a contiguous media buffer.
/// The buffer is unlocked before this method returns.
/// </summary>
/// <param name="contiguousBuffer">Buffer containing the raw Bgr32 frame pixels.</param>
/// <param name="frameWidth">Frame width in pixels.</param>
/// <param name="frameHeight">Frame height in pixels.</param>
/// <returns>The created bitmap source.</returns>
private static BitmapSource CreateBitmapSource(MediaBuffer contiguousBuffer, int frameWidth, int frameHeight)
{
    // Lock OUTSIDE the try: the original locked inside it, so a failed Lock still
    // triggered Unlock in the finally on a buffer that was never locked.
    var thumbnailBuffer = contiguousBuffer.Lock(out _, out var thumbnailBufferSize);
    try
    {
        return BitmapSource.Create(
            frameWidth,
            frameHeight,
            96,
            96,
            PixelFormats.Bgr32,
            null,
            thumbnailBuffer,
            thumbnailBufferSize,
            frameWidth * 4); // Bgr32 => 4 bytes per pixel stride.
    }
    finally
    {
        contiguousBuffer.Unlock();
    }
}
/// <summary>
/// Wraps an encoded NAL unit in a Media Foundation sample (with optional timestamp)
/// and feeds it to the decoder. Errors are logged, not rethrown.
/// </summary>
/// <param name="nal">Encoded NAL unit bytes.</param>
/// <param name="time">Sample time in seconds; NaN means no timestamp is set.</param>
private void Decode(byte[] nal, double time)
{
    try
    {
        var encodedSample = MediaFactory.CreateSample();
        try
        {
            using (MediaBuffer mb = MediaFactory.CreateMemoryBuffer(nal.Length))
            {
                // Lock before the try so a failed Lock is not followed by an Unlock.
                var dest = mb.Lock(out int cbMaxLength, out int cbCurrentLength);
                try
                {
                    Marshal.Copy(nal, 0, dest, nal.Length);
                    mb.CurrentLength = nal.Length;
                }
                finally
                {
                    // The original skipped Unlock if the copy threw, leaving the
                    // buffer locked; mirror ProcessData's try/finally handling.
                    mb.Unlock();
                }

                encodedSample.AddBuffer(mb);

                if (!double.IsNaN(time))
                {
                    var sampleTime = MfTool.SecToMfTicks(time);
                    encodedSample.SampleTime = sampleTime;
                }
            }

            var res = decoder.ProcessSample(encodedSample, OnSampleDecoded);
            if (!res)
            {
                // Was silently swallowed; log like ProcessData does.
                logger.Debug("decoder.ProcessSample() " + res);
            }
        }
        finally
        {
            encodedSample?.Dispose();
        }
    }
    catch (Exception ex)
    {
        logger.Error(ex);
    }
}
/// <summary>
/// Gets the decoded PCM samples. See remarks.
/// </summary>
/// <param name="startingPositionInSeconds">The starting position in seconds.</param>
/// <returns>An enumerator of pointer to PCM decoded data with the same format as returned by <see cref="WaveFormat"/>.</returns>
/// <remarks>
/// This method is only working as a single enumerator at a time.
/// The <see cref="SetSourceStream(System.IO.Stream)"/> must be set before calling <see cref="GetSamples()"/>
/// </remarks>
public IEnumerable<DataPointer> GetSamples(TimeSpan startingPositionInSeconds)
{
    // A new reader is setup, so initialize it.
    lock (sourceReaderLock)
    {
        // If the reader was changed, swap it in and dispose the previous one.
        if (nextSourceReader != null)
        {
            if (sourceReader != null)
                sourceReader.Dispose();
            sourceReader = nextSourceReader;
            nextSourceReader = null;
        }
    }

    // Make sure that any prior call's pending sample/buffer state is released.
    CleanupAndDispose();

    CheckIfDisposed();

    // Set the position (seconds -> 100ns Media Foundation ticks).
    sourceReader.SetCurrentPosition((long)(startingPositionInSeconds.TotalSeconds * 1e7));

    while (true)
    {
        int streamIndex;
        SourceReaderFlags flags;
        long time;

        CheckIfDisposed();

        // currentSample is a field (not a local) so Dispose() on this object can
        // release it if the caller abandons the enumeration mid-yield.
        using (currentSample = sourceReader.ReadSample(SourceReaderIndex.FirstAudioStream, SourceReaderControlFlags.None, out streamIndex, out flags, out time))
        {
            // Bitmask test: end-of-stream may be combined with other reader flags.
            if ((flags & SourceReaderFlags.Endofstream) != 0)
                break;

            CheckIfDisposed();

            using (currentBuffer = currentSample.ConvertToContiguousBuffer())
            {
                int bufferMaxLength;
                int bufferCurrentLength;

                CheckIfDisposed();

                // The buffer stays locked across the yield so the consumer can read
                // through the returned pointer; it is unlocked after control returns.
                var ptr = currentBuffer.Lock(out bufferMaxLength, out bufferCurrentLength);

                yield return new DataPointer(ptr, bufferCurrentLength);

                // Warning, because the yield could never return here, currentBuffer and currentSample should be disposed when disposing this object or when
                // calling it again on the GetSamples method.

                // In case a Dispose occured while decoding
                if (currentBuffer == null)
                    break;

                currentBuffer.Unlock();
            }
        }
    }

    // They have been disposed, so we can just clear them
    currentBuffer = null;
    currentSample = null;
}
/// <summary>
/// Wraps an encoded frame in a Media Foundation sample, stamps it with a (validated)
/// sample time, and feeds it to the decoder. Errors are logged, not rethrown.
/// </summary>
/// <param name="data">Encoded frame bytes.</param>
/// <param name="time">Sample time in seconds; NaN or non-positive means no timestamp.</param>
public void ProcessData(byte[] data, double time)
{
    try
    {
        var encodedSample = MediaFactory.CreateSample();
        try
        {
            using (MediaBuffer mb = MediaFactory.CreateMemoryBuffer(data.Length))
            {
                // Lock OUTSIDE the try: the original locked inside it, so a failed
                // Lock still triggered Unlock in the finally on an unlocked buffer.
                var dest = mb.Lock(out int cbMaxLength, out int cbCurrentLength);
                try
                {
                    Marshal.Copy(data, 0, dest, data.Length);
                    mb.CurrentLength = data.Length;
                }
                finally
                {
                    mb.Unlock();
                }

                encodedSample.AddBuffer(mb);
            }

            if (!double.IsNaN(time) && time > 0)
            {
                // The renderer can glitch if the timestamps are bad!
                // TODO: validate timestamps before and after the decoder...
                var sampleTime = MfTool.SecToMfTicks(time);
                if (presentationAdjust == long.MaxValue)
                {
                    // Capture the offset between the presentation clock and the
                    // first incoming timestamp; long.MaxValue marks "not yet set".
                    var presentaionTime = presentationClock.Time;
                    presentationAdjust = presentaionTime - sampleTime;
                }

                encodedSample.SampleTime = sampleTime; // + presentationAdjust;
                encodedSample.SampleDuration = 0; // MfTool.SecToMfTicks(0.033);
            }
            else
            {
                encodedSample.SampleTime = 0;
                encodedSample.SampleDuration = 0;
            }

            var res = decoder.ProcessSample(encodedSample, OnSampleDecoded, OnMediaTypeChanged);
            if (!res)
            {
                logger.Debug("decoder.ProcessSample() " + res);
            }
        }
        finally
        {
            encodedSample?.Dispose();
        }
    }
    catch (Exception ex)
    {
        logger.Error(ex);
    }
}
/// <summary>
/// GenerateNext is called by the Generator base class when the next sample should be read.
/// Reads one sample, rejects (or optionally drops) out-of-order timestamps, and posts it
/// to the image or audio stream.
/// </summary>
/// <param name="currentTime">The originating time that triggered the current call.</param>
/// <returns>The originating time at which to capture the next sample; <see cref="DateTime.MaxValue"/> once the stream has ended.</returns>
protected override DateTime GenerateNext(DateTime currentTime)
{
    DateTime originatingTime = default(DateTime);
    int streamIndex = 0;
    SourceReaderFlags flags = SourceReaderFlags.None;
    long timestamp = 0;
    Sample sample = this.sourceReader.ReadSample(SourceReaderIndex.AnyStream, 0, out streamIndex, out flags, out timestamp);
    if (sample != null)
    {
        originatingTime = this.start + TimeSpan.FromTicks(timestamp);
        MediaBuffer buffer = sample.ConvertToContiguousBuffer();
        try
        {
            IntPtr data = buffer.Lock(out int maxByteCount, out int currentByteCount);
            try
            {
                if (streamIndex == this.imageStreamIndex)
                {
                    // Detect out of order originating times
                    if (originatingTime > this.lastPostedImageTime)
                    {
                        using (var sharedImage = ImagePool.GetOrCreate(this.videoWidth, this.videoHeight, Imaging.PixelFormat.BGR_24bpp))
                        {
                            sharedImage.Resource.CopyFrom(data);
                            this.Image.Post(sharedImage, originatingTime);
                            this.lastPostedImageTime = originatingTime;
                        }
                    }
                    else if (!this.dropOutOfOrderPackets)
                    {
                        // NOTE: the original string literal contained a raw line break
                        // (invalid in an interpolated string); rejoined to match the
                        // parallel audio-stream message below.
                        throw new InvalidOperationException(
                            $"The most recently captured image frame has a timestamp ({originatingTime.TimeOfDay}) which is before " +
                            $"that of the last posted image frame ({this.lastPostedImageTime.TimeOfDay}), as reported by the video stream. This could " +
                            $"be due to a timing glitch in the video stream. Set the 'dropOutOfOrderPackets' " +
                            $"parameter to true to handle this condition by dropping " +
                            $"packets with out of order timestamps.");
                    }
                }
                else if (streamIndex == this.audioStreamIndex)
                {
                    // Detect out of order originating times
                    if (originatingTime > this.lastPostedAudioTime)
                    {
                        AudioBuffer audioBuffer = new AudioBuffer(currentByteCount, this.waveFormat);
                        Marshal.Copy(data, audioBuffer.Data, 0, currentByteCount);
                        this.Audio.Post(audioBuffer, originatingTime);
                        this.lastPostedAudioTime = originatingTime;
                    }
                    else if (!this.dropOutOfOrderPackets)
                    {
                        throw new InvalidOperationException(
                            $"The most recently captured audio buffer has a timestamp ({originatingTime.TimeOfDay}) which is before " +
                            $"that of the last posted audio buffer ({this.lastPostedAudioTime.TimeOfDay}), as reported by the audio stream. This could " +
                            $"be due to a timing glitch in the audio stream. Set the 'dropOutOfOrderPackets' " +
                            $"parameter to true to handle this condition by dropping " +
                            $"packets with out of order timestamps.");
                    }
                }
            }
            finally
            {
                // Always unlock, even when an out-of-order exception is thrown above
                // (the original skipped Unlock/Dispose in that path, leaking the sample).
                buffer.Unlock();
            }
        }
        finally
        {
            buffer.Dispose();
            sample.Dispose();
        }
    }

    // SourceReaderFlags is a bitmask; test the end-of-stream bit rather than
    // comparing the whole value for equality (other bits may be set too).
    if ((flags & SourceReaderFlags.Endofstream) != 0)
    {
        return DateTime.MaxValue; // Used to indicate there is no more data.
    }

    return originatingTime;
}