/// <summary>
/// GenerateNext is called by the Generator base class when the next sample should be read.
/// </summary>
/// <param name="previous">Time of previous sample.</param>
/// <returns>Time for current sample.</returns>
protected override DateTime GenerateNext(DateTime previous)
{
    DateTime originatingTime = default(DateTime);
    int streamIndex = 0;
    SourceReaderFlags flags = SourceReaderFlags.None;
    long timestamp = 0;
    Sample sample = this.sourceReader.ReadSample(SourceReaderIndex.AnyStream, 0, out streamIndex, out flags, out timestamp);
    if (sample != null)
    {
        originatingTime = this.start + TimeSpan.FromTicks(timestamp);
        MediaBuffer buffer = sample.ConvertToContiguousBuffer();
        int currentByteCount = 0;
        int maxByteCount = 0;
        IntPtr data = buffer.Lock(out maxByteCount, out currentByteCount);

        if (streamIndex == this.imageStreamIndex)
        {
            using (var sharedImage = ImagePool.GetOrCreate(this.videoWidth, this.videoHeight, Imaging.PixelFormat.BGR_24bpp))
            {
                sharedImage.Resource.CopyFrom(data);
                this.Image.Post(sharedImage, originatingTime);
            }
        }
        else if (streamIndex == this.audioStreamIndex)
        {
            AudioBuffer audioBuffer = new AudioBuffer(currentByteCount, this.waveFormat);
            Marshal.Copy(data, audioBuffer.Data, 0, currentByteCount);
            this.Audio.Post(audioBuffer, originatingTime);
        }

        buffer.Unlock();
        buffer.Dispose();
        sample.Dispose();
    }

    if (flags == SourceReaderFlags.Endofstream)
    {
        return DateTime.MaxValue; // Used to indicate there is no more data
    }

    if (originatingTime <= previous)
    {
        return previous + TimeSpan.FromTicks(1); // To enforce strictly increasing times for the generator
    }

    return originatingTime;
}
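The method above locks the contiguous media buffer, copies the payload out, and then unlocks and disposes it in straight-line code. Below is a minimal sketch of the same convert/lock/copy/unlock sequence, hardened with try/finally so the buffer is released even if the copy throws; the extension method name CopySampleBytes is hypothetical and not part of the original component, while the Sample/MediaBuffer types and calls are the SharpDX.MediaFoundation ones used above.

using System;
using System.Runtime.InteropServices;
using SharpDX.MediaFoundation;

internal static class SampleExtensions
{
    // Hypothetical helper: copies the payload of a Media Foundation sample into a managed byte array.
    public static byte[] CopySampleBytes(this Sample sample)
    {
        using (MediaBuffer buffer = sample.ConvertToContiguousBuffer())
        {
            int maxByteCount;
            int currentByteCount;
            IntPtr data = buffer.Lock(out maxByteCount, out currentByteCount);
            try
            {
                var bytes = new byte[currentByteCount];
                Marshal.Copy(data, bytes, 0, currentByteCount);
                return bytes;
            }
            finally
            {
                buffer.Unlock(); // always release the lock, even if the copy throws
            }
        }
    }
}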
public BufferHelper(Sample sample, int sampleHeight)
{
    buffer2D = null;
    buffer = sample.ConvertToContiguousBuffer();
    buffer2D = buffer.QueryInterfaceOrNull<SharpDX.MediaFoundation.Buffer2D>();
    int length = 0, pitch = 0;
    if (buffer2D != null)
    {
        // Lock2D marshals the scanline-0 pointer into a byte array; reconstruct the IntPtr from it,
        // reading all pointer bytes so the value is not truncated in a 64-bit process.
        byte[] arr = new byte[IntPtr.Size];
        buffer2D.Lock2D(arr, out pitch);
        length = buffer2D.ContiguousLength;
        Data = IntPtr.Size == 8
            ? new IntPtr(BitConverter.ToInt64(arr, 0))
            : new IntPtr(BitConverter.ToInt32(arr, 0));
    }
    else
    {
        // No 2D buffer available: lock the contiguous buffer and derive the pitch from the total length.
        int curlen;
        Data = buffer.Lock(out length, out curlen);
        pitch = length / sampleHeight;
    }

    Length = length;
    Pitch = pitch;
}
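BufferHelper exposes Data, Length, and Pitch so a caller can treat the IMF2DBuffer path (driver-reported pitch) and the plain contiguous path (pitch derived from length / height) uniformly. The following is a rough usage sketch, not part of the original code: frameWidthBytes and frameHeight are assumed inputs, and note that the constructor as shown leaves the underlying buffer locked, so real code would also need a matching unlock/dispose step.

// Hedged sketch: copy a frame row by row, honoring the pitch (stride) reported by BufferHelper.
byte[] CopyFrame(Sample sample, int frameWidthBytes, int frameHeight)
{
    var helper = new BufferHelper(sample, frameHeight);
    var pixels = new byte[frameWidthBytes * frameHeight];
    for (int row = 0; row < frameHeight; row++)
    {
        // Each row starts Pitch bytes after the previous one; only the visible bytes are copied.
        IntPtr src = IntPtr.Add(helper.Data, row * helper.Pitch);
        Marshal.Copy(src, pixels, row * frameWidthBytes, frameWidthBytes);
    }

    // NOTE: a real implementation would unlock/dispose the underlying MediaBuffer/Buffer2D here.
    return pixels;
}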
/// <summary>
/// Gets the decoded PCM samples. See remarks.
/// </summary>
/// <param name="startingPositionInSeconds">The starting position in seconds.</param>
/// <returns>An enumerator of pointers to PCM decoded data with the same format as returned by <see cref="WaveFormat"/>.</returns>
/// <remarks>
/// This method only supports a single active enumerator at a time.
/// <see cref="SetSourceStream(System.IO.Stream)"/> must be called before calling <see cref="GetSamples()"/>.
/// </remarks>
public IEnumerable<DataPointer> GetSamples(TimeSpan startingPositionInSeconds)
{
    // If a new reader has been set up, swap it in and dispose of the previous one.
    lock (sourceReaderLock)
    {
        // If the reader was changed
        if (nextSourceReader != null)
        {
            if (sourceReader != null)
                sourceReader.Dispose();
            sourceReader = nextSourceReader;
            nextSourceReader = null;
        }
    }

    // Make sure that any prior call is cleaned up
    CleanupAndDispose();

    CheckIfDisposed();

    // Set the position (in 100-nanosecond units)
    sourceReader.SetCurrentPosition((long)(startingPositionInSeconds.TotalSeconds * 1e7));

    while (true)
    {
        int streamIndex;
        SourceReaderFlags flags;
        long time;

        CheckIfDisposed();

        using (currentSample = sourceReader.ReadSample(SourceReaderIndex.FirstAudioStream, SourceReaderControlFlags.None, out streamIndex, out flags, out time))
        {
            if ((flags & SourceReaderFlags.Endofstream) != 0)
                break;

            CheckIfDisposed();

            using (currentBuffer = currentSample.ConvertToContiguousBuffer())
            {
                int bufferMaxLength;
                int bufferCurrentLength;

                CheckIfDisposed();

                var ptr = currentBuffer.Lock(out bufferMaxLength, out bufferCurrentLength);

                yield return new DataPointer(ptr, bufferCurrentLength);

                // Warning: because the yield may never return here, currentBuffer and currentSample
                // must be disposed when disposing this object or when calling GetSamples again.

                // In case a Dispose occurred while decoding
                if (currentBuffer == null)
                    break;

                currentBuffer.Unlock();
            }
        }
    }

    // They have already been disposed, so we can just clear them
    currentBuffer = null;
    currentSample = null;
}
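A possible caller of this iterator could drain the decoded PCM into managed memory as sketched below. This assumes the surrounding type is SharpDX's AudioDecoder, constructible without arguments and exposing the SetSourceStream and WaveFormat members referenced in the remarks; the file name is purely illustrative.

using System;
using System.IO;
using System.Runtime.InteropServices;
using SharpDX;
using SharpDX.MediaFoundation;

internal static class DecodeExample
{
    // Hedged usage sketch: decode an audio file into managed PCM buffers.
    public static void DecodeToManagedBuffers(string path)
    {
        // Depending on how the surrounding type initializes Media Foundation,
        // MediaManager.Startup() may need to be called before this point.
        using (var stream = File.OpenRead(path))
        using (var decoder = new AudioDecoder())
        {
            decoder.SetSourceStream(stream);
            foreach (DataPointer pcm in decoder.GetSamples(TimeSpan.Zero))
            {
                var managed = new byte[pcm.Size];
                Marshal.Copy(pcm.Pointer, managed, 0, pcm.Size);
                // Hand 'managed' (format described by decoder.WaveFormat) to an audio sink here.
            }
        }
    }
}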
/// <summary>
/// GenerateNext is called by the Generator base class when the next sample should be read.
/// </summary>
/// <param name="currentTime">The originating time that triggered the current call.</param>
/// <returns>The originating time at which to capture the next sample.</returns>
protected override DateTime GenerateNext(DateTime currentTime)
{
    DateTime originatingTime = default(DateTime);
    int streamIndex = 0;
    SourceReaderFlags flags = SourceReaderFlags.None;
    long timestamp = 0;
    Sample sample = this.sourceReader.ReadSample(SourceReaderIndex.AnyStream, 0, out streamIndex, out flags, out timestamp);
    if (sample != null)
    {
        originatingTime = this.start + TimeSpan.FromTicks(timestamp);
        MediaBuffer buffer = sample.ConvertToContiguousBuffer();
        int currentByteCount = 0;
        int maxByteCount = 0;
        IntPtr data = buffer.Lock(out maxByteCount, out currentByteCount);

        if (streamIndex == this.imageStreamIndex)
        {
            // Detect out-of-order originating times
            if (originatingTime > this.lastPostedImageTime)
            {
                using (var sharedImage = ImagePool.GetOrCreate(this.videoWidth, this.videoHeight, Imaging.PixelFormat.BGR_24bpp))
                {
                    sharedImage.Resource.CopyFrom(data);
                    this.Image.Post(sharedImage, originatingTime);
                    this.lastPostedImageTime = originatingTime;
                }
            }
            else if (!this.dropOutOfOrderPackets)
            {
                throw new InvalidOperationException(
                    $"The most recently captured image frame has a timestamp ({originatingTime.TimeOfDay}) which is before " +
                    $"that of the last posted image frame ({this.lastPostedImageTime.TimeOfDay}), as reported by the video stream. This could " +
                    $"be due to a timing glitch in the video stream. Set the 'dropOutOfOrderPackets' " +
                    $"parameter to true to handle this condition by dropping " +
                    $"packets with out-of-order timestamps.");
            }
        }
        else if (streamIndex == this.audioStreamIndex)
        {
            // Detect out-of-order originating times
            if (originatingTime > this.lastPostedAudioTime)
            {
                AudioBuffer audioBuffer = new AudioBuffer(currentByteCount, this.waveFormat);
                Marshal.Copy(data, audioBuffer.Data, 0, currentByteCount);
                this.Audio.Post(audioBuffer, originatingTime);
                this.lastPostedAudioTime = originatingTime;
            }
            else if (!this.dropOutOfOrderPackets)
            {
                throw new InvalidOperationException(
                    $"The most recently captured audio buffer has a timestamp ({originatingTime.TimeOfDay}) which is before " +
                    $"that of the last posted audio buffer ({this.lastPostedAudioTime.TimeOfDay}), as reported by the audio stream. This could " +
                    $"be due to a timing glitch in the audio stream. Set the 'dropOutOfOrderPackets' " +
                    $"parameter to true to handle this condition by dropping " +
                    $"packets with out-of-order timestamps.");
            }
        }

        buffer.Unlock();
        buffer.Dispose();
        sample.Dispose();
    }

    if (flags == SourceReaderFlags.Endofstream)
    {
        return DateTime.MaxValue; // Used to indicate there is no more data
    }

    return originatingTime;
}
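The two GenerateNext variants above handle non-monotonic timestamps differently: the first nudges the returned time forward by one tick, while the second either drops the out-of-order sample or throws, depending on dropOutOfOrderPackets. A small, hypothetical guard that captures the drop-or-throw policy in one place might look like this; it is an illustration, not part of the original component.

using System;

// Hypothetical helper illustrating the out-of-order policy used above.
internal sealed class MonotonicTimeGuard
{
    private readonly bool dropOutOfOrder;
    private DateTime lastPosted = DateTime.MinValue;

    public MonotonicTimeGuard(bool dropOutOfOrder) => this.dropOutOfOrder = dropOutOfOrder;

    /// <summary>Returns true if the sample at <paramref name="originatingTime"/> should be posted.</summary>
    public bool ShouldPost(DateTime originatingTime, string streamName)
    {
        if (originatingTime > this.lastPosted)
        {
            this.lastPosted = originatingTime;
            return true;
        }

        if (this.dropOutOfOrder)
        {
            return false; // silently drop the out-of-order sample
        }

        throw new InvalidOperationException(
            $"{streamName}: timestamp {originatingTime.TimeOfDay} is not after the last posted time {this.lastPosted.TimeOfDay}.");
    }
}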