private void OnMediaSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs e)
{
    var instantBuffer = GetBuffer();
    var buffer = _mediaDecoder.ReadSample(instantBuffer, instantBuffer.Capacity);

    MediaStreamSample sample = null;
    if (buffer.Length > 0)
    {
        sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromSeconds(_currentTime));
        sample.Processed += OnSampleProcessed;

        var duration = _mediaDecoder.GetDurationFromBufferSize(buffer.Length);
        sample.Duration = TimeSpan.FromSeconds(duration);
        _currentTime += duration;
    }
    else
    {
        _currentTime = 0.0;
        _mediaDecoder.Seek(0);
    }

    e.Request.Sample = sample;
}
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    try
    {
        if (mediaStreamType == MediaStreamType.Video)
        {
            videoController.GetNextVideoFrame(ssrcId, frameStream =>
            {
                if (frameStream != null)
                {
                    // Send out the next sample
                    frameStream.Position = 0;
                    var msSamp = new MediaStreamSample(
                        videoDesc,
                        frameStream,
                        0,
                        frameStream.Length,
                        (DateTime.Now - startTime).Ticks,
                        emptySampleDict);
                    ReportGetSampleCompleted(msSamp);
                }
            });
        }
    }
    catch (Exception ex)
    {
        ClientLogger.Debug(ex.ToString());
    }
}
public static async Task<MediaStreamSample> CreateAudioSample(this AudioTag tag)
{
    var stream = tag.GetDataStream();
    var sample = await MediaStreamSample.CreateFromStreamAsync(
        stream.AsInputStream(), (uint)stream.Length, tag.TimeSpan); // the data size of each segment
    //sample.Duration = tag.TimeSpan; // BUG
    return sample;

    #region MyRegion
    //Debug.WriteLine(tag.GetDataStream().toString(30));
    //var ss = tag.GetDataInputStream();
    //var sample = await MediaStreamSample.CreateFromStreamAsync(ss, tag.Count, tag.TimeSpan); // the data size of each segment
    //return sample;
    /*
     * var stream = tag.GetDataStream(); //.GetDataStream().AsRandomAccessStream().GetInputStreamAt();
     * var si = stream.AsInputStream();
     * var sample = await MediaStreamSample.CreateFromStreamAsync(si, (uint)stream.Length, tag.TimeSpan);
     */
    //stream.Position = (long)a.Offset; // problem: a concurrent or interleaved operation changed the state of the object, invalidating this operation
    //var ss = stream.AsInputStream();
    //A concurrent or interleaved operation changed the state of the object, invalidating this operation. (Exception from HRESULT: 0x8000000C)
    //var ss = stream.AsRandomAccessStream().GetInputStreamAt(a.Offset); // success
    //var ss = stream.GetInputStreamAt(a.Offset); // success
    //sample.Duration = a.TimeSpan - at; // how far the progress bar advances for each segment
    //sample.KeyFrame = true;
    #endregion
}
/// <summary>
/// Sets the next sample for the request.
/// </summary>
private int SetSample(MediaStreamSourceSampleRequest request)
{
    try
    {
        Debug.WriteLine($"Length: {_fileStream.Length}, Offset: {_byteOffset}");

        _fileStream.Seek(_byteOffset, SeekOrigin.Begin);
        int count = _fileStream.Read(_buffer, _bufferOffset, BUFFER_SIZE);

        IBuffer buffer = _buffer.AsBuffer(_bufferOffset, count);
        _bufferOffset += count;
        if (_bufferOffset + BUFFER_SIZE > _bufferSize)
        {
            _bufferOffset = 0;
        }

        var sample = MediaStreamSample.CreateFromBuffer(buffer, _timeOffset);
        sample.Duration = _sampleDuration;
        sample.KeyFrame = true;

        _byteOffset += count;
        _timeOffset = _timeOffset.Add(_sampleDuration);

        request.Sample = sample;
        return count;
    }
    catch (Exception)
    {
        return 0;
    }
}
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    // Request 10 ms of data (AvgBytesPerSec / 100), rounded up to the nearest block.
    uint cbBuffer = (uint)AlignUp(
        _waveFormat.AvgBytesPerSec / 100,
        _waveFormat.BlockAlign);

    if (_currentTimeStamp < _duration)
    {
        byte[] buffer = new byte[cbBuffer];
        FillBuffer(buffer, _currentTimeStamp, _duration);

        // Send out the next sample
        using (var stream = new MemoryStream(buffer))
        {
            MediaStreamSample msSamp = new MediaStreamSample(
                _audioDesc,
                stream,
                0,
                cbBuffer,
                _currentTimeStamp,
                _emptySampleDict);

            // Move our timestamp and position forward
            _currentTimeStamp += _waveFormat.AudioDurationFromBufferSize(cbBuffer);

            ReportGetSampleCompleted(msSamp);
        }
    }
    else
    {
        // Report EOS
        ReportGetSampleCompleted(new MediaStreamSample(_audioDesc, null, 0, 0, 0, _emptySampleDict));
    }
}
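// The AlignUp helper used above is not included in this snippet. A minimal sketch, assuming it
// simply rounds the first argument up to the next multiple of the block alignment (matching the
// commented-out version shown in a later example):
private static int AlignUp(int a, int b)
{
    // Round a up to the nearest multiple of b, e.g. AlignUp(8821, 4) == 8824.
    int tmp = a + b - 1;
    return tmp - (tmp % b);
}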
//private DateTime _firstSampleReportedAt = DateTime.MinValue;
//private int _samplesReported;

protected virtual void ReportSample(MemoryStream memoryStream)
{
    try
    {
        //if (_firstSampleReportedAt == DateTime.MinValue)
        //{
        //    _firstSampleReportedAt = DateTime.Now;
        //}
        //if (++_samplesReported % 200 == 0)
        //{
        //    double averageSampleRequestTime = (DateTime.Now - _firstSampleRequestedAt).TotalMilliseconds / _samplesRequested;
        //    double averageSampleReportTime = (DateTime.Now - _firstSampleReportedAt).TotalMilliseconds / _samplesReported;
        //    ClientLogger.Debug("Samples requested:{0}; reported:{1}; avgRequestInterval:{2}; avgReportInterval:{3}", _samplesRequested, _samplesReported, averageSampleRequestTime, averageSampleReportTime);
        //}
        var sample = new MediaStreamSample(
            _mediaStreamDescription,
            memoryStream,
            0,
            memoryStream.Length,
            (DateTime.Now - _startTime).Ticks,
            _emptySampleDict);
        ReportGetSampleCompleted(sample);
    }
    catch (Exception ex)
    {
        ClientLogger.Debug(ex.ToString());
    }
}
/// <summary>
/// Get a sample from the pool which has a buffer with a given capacity
/// and with the associated timestamp.
/// </summary>
/// <param name="byteSize">The exact size in bytes that the sample buffer needs to accommodate.</param>
/// <param name="timestamp">The sample presentation timestamp.</param>
/// <returns>The newly created sample.</returns>
/// <remarks>
/// The returned sample's buffer has a <see cref="Windows.Storage.Streams.Buffer.Length"/> property
/// set to the input <paramref name="byteSize"/>. This is required to be set before creating the sample,
/// and should not be modified once the sample has been created.
/// </remarks>
public MediaStreamSample Pop(uint byteSize, System.TimeSpan timestamp)
{
    Buffer buffer;
    lock (this)
    {
        if (_freeBuffers.Count > 0)
        {
            buffer = _freeBuffers.Pop();
            if (buffer.Capacity < byteSize)
            {
                buffer = new Buffer(byteSize);
            }
        }
        else
        {
            buffer = new Buffer(byteSize);
        }
        _usedBuffers.Enqueue(buffer);

        // This must be set before calling CreateFromBuffer() below, otherwise
        // the Media Foundation pipeline throws an exception.
        buffer.Length = byteSize;
    }

    // Because the managed wrapper does not allow modifying the timestamp,
    // we need to recreate the sample each time with the correct timestamp.
    var sample = MediaStreamSample.CreateFromBuffer(buffer, timestamp);
    sample.Processed += OnSampleProcessed;
    return sample;
}
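// A minimal sketch of how such a pool might be consumed from a SampleRequested handler.
// The pool field (_samplePool) and the decoder call (TryDecodeFrame) are hypothetical and not
// part of the snippet above; the Processed handler that returns buffers to the pool is shown
// in a later example. Requires System.Runtime.InteropServices.WindowsRuntime for CopyTo().
private void OnSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    // Assume the decoder reports the exact encoded frame size and timestamp up front.
    if (TryDecodeFrame(out byte[] frameData, out TimeSpan timestamp))
    {
        // Pop() sets Buffer.Length to the requested size before creating the sample,
        // so the copy below fills exactly the bytes the pipeline will consume.
        MediaStreamSample sample = _samplePool.Pop((uint)frameData.Length, timestamp);
        frameData.CopyTo(0, sample.Buffer, 0, frameData.Length);
        args.Request.Sample = sample;
    }
}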
void GetVideoSample()
{
    // Seems like creating a new stream is the only way to avoid out of memory and
    // actually figure out the correct offset. That can't be right.
    MemoryStream _frameStream = new MemoryStream();

    byte[] buffer = new byte[this.sampleLength];
    //_videoStream.Read(buffer, 0, sampleSize);
    this.inputStream.Read(buffer, 0, this.sampleLength);
    _frameStream.Write(buffer, 0, buffer.Length);

    // Send out the next sample
    MediaStreamSample msSamp = new MediaStreamSample(
        _videoDescription,
        _frameStream,
        0,
        this.sampleLength,
        _currentVideoTimeStamp,
        _emptySampleDict);

    _currentVideoTimeStamp += _frameTime;

    ReportGetSampleCompleted(msSamp);
}
public void PlaySound(int samplingRate, byte[] pcmData)
{
    AudioEncodingProperties audioProps = AudioEncodingProperties.CreatePcm((uint)samplingRate, 1, 16);
    AudioStreamDescriptor audioDesc = new AudioStreamDescriptor(audioProps);
    MediaStreamSource mss = new MediaStreamSource(audioDesc);

    bool samplePlayed = false;
    mss.SampleRequested += (sender, args) =>
    {
        if (samplePlayed)
        {
            return;
        }
        IBuffer ibuffer = pcmData.AsBuffer();
        MediaStreamSample sample = MediaStreamSample.CreateFromBuffer(ibuffer, TimeSpan.Zero);
        sample.Duration = TimeSpan.FromSeconds(pcmData.Length / 2.0 / samplingRate);
        args.Request.Sample = sample;
        samplePlayed = true;
    };

    mediaElement.SetMediaStreamSource(mss);
}
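// A minimal usage sketch for the method above: synthesize half a second of 16-bit mono PCM
// (a 440 Hz sine tone) and hand it to PlaySound. The sample rate, frequency, and duration are
// arbitrary illustration values, not taken from the snippet above.
private void PlayTestTone()
{
    const int samplingRate = 44100;
    const double frequency = 440.0;
    const double seconds = 0.5;

    int sampleCount = (int)(samplingRate * seconds);
    byte[] pcmData = new byte[sampleCount * 2]; // 16-bit mono => 2 bytes per sample
    for (int i = 0; i < sampleCount; i++)
    {
        short value = (short)(short.MaxValue * 0.5 * Math.Sin(2 * Math.PI * frequency * i / samplingRate));
        pcmData[2 * i] = (byte)(value & 0xFF);            // little-endian low byte
        pcmData[2 * i + 1] = (byte)((value >> 8) & 0xFF); // high byte
    }

    PlaySound(samplingRate, pcmData);
}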
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    for (int i = 0; i < numSamples; i++)
    {
        if (this.index < this.StartPoint || this.index > this.EndPoint)
        {
            this.index = this.StartPoint;
        }

        memoryStream.WriteByte(this.sourceData[index]);
        memoryStream.WriteByte(this.sourceData[index + 1]);
        memoryStream.WriteByte(this.sourceData[index + 2]);
        memoryStream.WriteByte(this.sourceData[index + 3]);
        index += 4;
    }

    MediaStreamSample mediaStreamSample = new MediaStreamSample(
        mediaStreamDescription,
        memoryStream,
        currentPosition,
        bufferByteCount,
        currentTimeStamp,
        emptySampleDict);

    currentTimeStamp += bufferByteCount * 10000000L / byteRate;
    currentPosition += bufferByteCount;

    ReportGetSampleCompleted(mediaStreamSample);
}
public MediaStreamSample GenerateSample()
{
    // Generate 1 s of data
    var buffer = new byte[2 * m_sampleRate];
    var time = Time.TotalSeconds;
    for (int i = 0; i < m_sampleRate; i++)
    {
        Int16 value = (Int16)(Int16.MaxValue * Math.Sin(2 * Math.PI * m_sineFrequency * time * time)); // Chirp sine wave
        buffer[2 * i] = (byte)(value & 0xFF);
        buffer[2 * i + 1] = (byte)((value >> 8) & 0xFF);
        time += 1 / (double)m_sampleRate;
    }

    var sample = MediaStreamSample.CreateFromBuffer(buffer.AsBuffer(), Time);
    sample.Discontinuous = (Time == TimeSpan.Zero);
    sample.Duration = TimeSpan.FromSeconds(1);
    Time += TimeSpan.FromSeconds(1);
    return sample;
}
private void SendSamples()
{
    while (_sampleQueue.Count() > 0 && _outstandingGetVideoSampleCount > 0)
    {
        if (!(shutdownEvent.WaitOne(0)))
        {
            VideoSample vs = _sampleQueue.Dequeue();
            Stream s = System.Runtime.InteropServices.WindowsRuntime.WindowsRuntimeBufferExtensions.AsStream(vs.buffer);

            // Send out the next sample
            MediaStreamSample msSamp = new MediaStreamSample(
                _videoDesc,
                s,
                0,
                s.Length,
                (long)vs.hnsPresentationTime,
                (long)vs.hnsSampleDuration,
                _emptySampleDict);

            ReportGetSampleCompleted(msSamp);
            _outstandingGetVideoSampleCount--;
        }
        else
        {
            // If video rendering is shutting down we should no longer deliver frames
            return;
        }
    }
}
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    //System.Diagnostics.Debug.WriteLine(">>>>>>>>>>>>>>GetSampleAsync.");
    DateTime now = DateTime.Now;
    TimeSpan ts2 = new TimeSpan(now.Ticks);
    TimeSpan ts1 = new TimeSpan(startTime.Ticks);
    TimeSpan ts = ts2.Subtract(ts1);
    System.Diagnostics.Debug.WriteLine("ts minutes " + ts.Minutes);
    if (ts.TotalMinutes >= 60)
    {
        CallStreamComplete();
    }

    byte[] buffer = new byte[synth.BufferSize];
    synth.GetNext(buffer);

    using (var stream = new MemoryStream(buffer))
    {
        MediaStreamSample msSamp = new MediaStreamSample(
            _audioDesc,
            stream,
            0,
            synth.BufferSize,
            _currentTimeStamp,
            _emptySampleDict);

        // Move our timestamp and position forward (long arithmetic to avoid int overflow)
        _currentTimeStamp += synth.BufferSize * 10000000L / AvgBytesPerSec;

        ReportGetSampleCompleted(msSamp);
    }
}
/// <summary>
/// Dequeues a NAL and gives it to the decoder.
/// </summary>
private void HandleSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    //Log.Verbose("HandleSampleRequested");
    Nal nal;
    lock (usedNals)
    {
        //Log.Verbose("usedNals.Dequeue: {0}", usedNals.Count);
        nal = (usedNals.Count > 0) ? usedNals.Dequeue() : null;
    }

    if (nal != null)
    {
        args.Request.Sample = MediaStreamSample.CreateFromBuffer(nal.Buffer, new TimeSpan(0));
        lock (availableNals)
        {
            //Log.Verbose("availableNals.Enqueue");
            availableNals.Enqueue(nal);
        }
    }
    else
    {
        //Log.Verbose("Deferred");
        request = args.Request;
        deferral = args.Request.GetDeferral();
    }
}
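// A minimal sketch of the producer side that pairs with the deferral above: when a new NAL
// arrives and a request has been deferred, satisfy it and complete the deferral. The method
// name (OnNalReceived) is hypothetical; request, deferral, usedNals and availableNals are the
// members referenced in the handler above. Synchronization around the deferral check is
// elided for brevity.
private void OnNalReceived(Nal nal)
{
    if (deferral != null)
    {
        // A request is pending: answer it directly and recycle the NAL.
        request.Sample = MediaStreamSample.CreateFromBuffer(nal.Buffer, new TimeSpan(0));
        deferral.Complete();
        request = null;
        deferral = null;
        lock (availableNals)
        {
            availableNals.Enqueue(nal);
        }
    }
    else
    {
        // No request pending: queue the NAL for the next HandleSampleRequested call.
        lock (usedNals)
        {
            usedNals.Enqueue(nal);
        }
    }
}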
protected virtual void ReportSample(MemoryStream frameStream)
{
    try
    {
        if (frameStream != null)
        {
            frameStream.Position = 0; // .Seek(0, SeekOrigin.Begin);

            // Send out the next sample
            var msSamp = new MediaStreamSample(
                _videoDesc,
                frameStream,
                0,
                frameStream.Length,
                (DateTime.Now - _startTime).Ticks,
                _emptySampleDict);

            ReportGetSampleCompleted(msSamp);
        }
    }
    catch (Exception ex)
    {
        ClientLogger.Debug(ex.ToString());
    }
}
private async Task<Tuple<MediaStreamSample, uint>> ParseAACSampleAsync(bool partial = false, byte[] partialBytes = null)
{
    IBuffer buffer = null;
    MediaStreamSample sample = null;
    uint sampleLength = 0;

    if (partial)
    {
        buffer = partialBytes.AsBuffer();
        sampleLength = aac_adts_sampleSize - (uint)partialBytes.Length;
        byteOffset += sampleLength;
    }
    else
    {
        await socketReader.LoadAsync(aac_adts_sampleSize);
        buffer = socketReader.ReadBuffer(aac_adts_sampleSize);
        byteOffset += aac_adts_sampleSize;
        sampleLength = aac_adts_sampleSize;
    }

    sample = MediaStreamSample.CreateFromBuffer(buffer, timeOffSet);
    sample.Duration = aac_adts_sampleDuration;
    sample.KeyFrame = true;

    timeOffSet = timeOffSet.Add(aac_adts_sampleDuration);

    return new Tuple<MediaStreamSample, uint>(sample, sampleLength);
}
private MediaStreamSample CreateVideoSample(byte[] buf)
{
    if (videoStart.Ticks == 0)
    {
        videoStart = DateTime.Now;
    }

    // Marshal this buffer so we can safely queue it without worrying about
    // reuse of the memory backing it
    byte[] bufCopy = new byte[buf.Length];
    Array.Copy(buf, bufCopy, buf.Length);

    MediaStreamSample sample = MediaStreamSample.CreateFromBuffer(bufCopy.AsBuffer(), DateTime.Now - videoStart);
    sample.Duration = TimeSpan.Zero;

    // HACK: Marking all frames as keyframes seems
    // to keep the decoder from dying after the first
    // few seconds.
    sample.KeyFrame = true;

    // NAL unit type 5 (the low 5 bits of the byte after the 4-byte Annex B start code)
    // indicates an IDR slice, i.e. a genuine keyframe.
    if ((buf[4] & 0x1F) == 0x5)
    {
        sample.KeyFrame = true;
    }

    return sample;
}
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    try
    {
        MemoryStream rawSampleStream;
        byte[] rawSample;
        if ((rawSample = frameSource.GetNextFrame()) != null)
        {
            rawSampleStream = new MemoryStream(rawSample);
            LastPulseSubmittedAt = DateTime.Now;
        }
        else
        {
            rawSampleStream = new MemoryStream(emptyFrame);
        }

        MediaStreamSample sample = new MediaStreamSample(
            mediaStreamDescription,
            rawSampleStream,
            0,
            rawSampleStream.Length,
            (DateTime.Now - startTime).Ticks,
            emptySampleDict);

        ReportGetSampleCompleted(sample);
    }
    catch (Exception ex)
    {
        ClientLogger.Debug(ex.ToString());
    }
}
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    CameraStreamSourceDataSingleton dataSource = CameraStreamSourceDataSingleton.Instance;

    if (frameStreamOffset + dataSource.FrameBufferSize > dataSource.FrameStreamSize)
    {
        dataSource.FrameStream.Seek(0, SeekOrigin.Begin);
        frameStreamOffset = 0;
    }

    Task tsk = dataSource.CameraEffect.GetNewFrameAndApplyEffect().AsTask();

    // Wait for the asynchronous call to complete, then report to the
    // MediaElement that new samples are ready.
    tsk.ContinueWith((task) =>
    {
        dataSource.FrameStream.Position = 0;

        MediaStreamSample msSamp = new MediaStreamSample(
            videoStreamDescription,
            dataSource.FrameStream,
            frameStreamOffset,
            dataSource.FrameBufferSize,
            currentTime,
            emptySampleDict);

        ReportGetSampleCompleted(msSamp);
        currentTime += frameTime;
        frameStreamOffset += dataSource.FrameBufferSize;
    });
}
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    int numSamples = 512;
    int bufferByteCount = channelCount * BitsPerSample / 8 * numSamples;

    for (int i = 0; i < numSamples; i++)
    {
        StereoSample stereoSample = this.SampleMaker.GetSample();
        memoryStream.WriteByte((byte)(stereoSample.LeftSample & 0xFF));
        memoryStream.WriteByte((byte)(stereoSample.LeftSample >> 8));
        memoryStream.WriteByte((byte)(stereoSample.RightSample & 0xFF));
        memoryStream.WriteByte((byte)(stereoSample.RightSample >> 8));
    }

    // Send out the next sample
    MediaStreamSample mediaStreamSample = new MediaStreamSample(
        mediaStreamDescription,
        memoryStream,
        currentPosition,
        bufferByteCount,
        currentTimeStamp,
        emptySampleDict);

    // Move timestamp and position forward
    currentTimeStamp += bufferByteCount * 10000000L / byteRate;
    currentPosition += bufferByteCount;

    ReportGetSampleCompleted(mediaStreamSample);
}
public static async Task<MediaStreamSample> CreateVideoSample(this VideoTag tag, VideoTag FirstTag, bool hasHead)
{
    var stream = tag.createVideoStream(FirstTag, hasHead); // flv.createVideoStream(stream.AsStream(), vi);
    var sample = await MediaStreamSample.CreateFromStreamAsync(
        stream.AsInputStream(), (uint)stream.Length, tag.TimeSpan); // the data size of each segment
    //sample.Duration = tag.TimeSpan; // BUG
    return sample;

    #region MyRegion
    //Debug.WriteLine("CreateAudioSample:" + tag.Count + " " + tag.GetDataStream().Length);
    //Debug.WriteLine(BitConverter.ToString( tag.GetDataInputStream().AsStreamForRead().ReadBytes(30) ));
    /*
     * var stream = tag.createVideoStream(Videos[0], vi <= 2);
     * var si = stream.AsInputStream();
     * var sample = await MediaStreamSample.CreateFromStreamAsync(si, (uint)stream.Length, tag.TimeSpan); // the data size of each segment
     */
    #endregion
}
public void Generate(MediaStreamSourceSampleRequest request)
{
    lock (m_lock)
    {
        if (m_state != State.Started)
        {
            request.Sample = null;
            return;
        }

        if (m_frameAvailable)
        {
            Debug.Assert(m_currentFrame is not null);
            m_frameAvailable = false;
            request.Sample = MediaStreamSample.CreateFromDirect3D11Surface(m_currentFrame.Surface, m_currentFrame.SystemRelativeTime);
        }
        else
        {
            m_currentFrame?.Dispose();
            m_currentFrame = null;
            m_sampleRequest = request;
            m_sampleRequestCompletion = request.GetDeferral();
        }
    }
}
private void OnSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
{
    if (args.Request.StreamDescriptor is VideoStreamDescriptor)
    {
        var deferral = args.Request.GetDeferral();

        nextSampleReady.WaitOne();
        if (nextSample != null)
        {
            args.Request.Sample = MediaStreamSample.CreateFromDirect3D11Surface(nextSample.Surface, nextSample.Timestamp - encodingStart);

            // Manually forcing the surface to be disposed helps with memory consumption.
            // As this is the only consumer, this is safe.
            nextSample.Surface.Dispose();
        }
        else
        {
            args.Request.Sample = null;
        }

        sampleProcessed.Set();
        deferral.Complete();
    }
}
//private int AlignUp(int a, int b)
//{
//    int tmp = a + b - 1;
//    return tmp - (tmp % b);
//}

protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    int numSamples = ChannelCount * 256;
    int bufferByteCount = BitsPerSample / 8 * numSamples;

    // Fill the stream with noise
    for (int i = 0; i < numSamples; i++)
    {
        short sample = (short)_random.Next(short.MinValue, short.MaxValue);
        _stream.Write(BitConverter.GetBytes(sample), 0, sizeof(short));
    }

    // Send out the next sample
    MediaStreamSample msSamp = new MediaStreamSample(
        _audioDesc,
        _stream,
        _currentPosition,
        bufferByteCount,
        _currentTimeStamp,
        _emptySampleDict);

    // Move our timestamp and position forward
    _currentTimeStamp += _waveFormat.AudioDurationFromBufferSize((uint)bufferByteCount);
    _currentPosition += bufferByteCount;

    ReportGetSampleCompleted(msSamp);
}
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    while (mediaStreamSamples_[mediaStreamType].Count == 0)
    {
        Sample sample;
        Error ec = demuxer_.get_sample(out sample);
        if (ec == Error.success)
        {
            Dictionary<MediaSampleAttributeKeys, string> mediaSampleAttributes = new Dictionary<MediaSampleAttributeKeys, string>();
            if (sample.is_sync)
            {
                mediaSampleAttributes[MediaSampleAttributeKeys.KeyFrameFlag] = bool.TrueString;
            }

            MediaStreamType type = mediaStreamTypes_[(int)sample.index];
            MediaStreamSample sample2 = new MediaStreamSample(
                mediaStreamDescriptions_[type],
                new System.IO.MemoryStream(sample.data),
                0,
                sample.data.Length,
                (long)sample.time,
                mediaSampleAttributes);
            mediaStreamSamples_[type].Add(sample2);
        }
        else
        {
            if (ec == Error.would_block)
            {
                if (pending_)
                {
                    System.Threading.Thread.Sleep(100);
                    continue;
                }
                else
                {
                    pending_ = true;
                    System.Threading.ThreadPool.QueueUserWorkItem(GetSamplePending, mediaStreamType);
                }
            }
            else if (ec == Error.stream_end)
            {
                ReportGetSampleCompleted(null);
            }
            else
            {
                ErrorOccurred(ec.ToString());
            }
            return;
        }
    }

    pending_ = false;
    MediaStreamSample sample3 = mediaStreamSamples_[mediaStreamType][0];
    mediaStreamSamples_[mediaStreamType].RemoveAt(0);
    ReportGetSampleCompleted(sample3);
}
private MediaStreamSample GetAudioSample()
{
    var sample = audioSamples[audioSampleIndex];

    MediaStreamSample mediaStreamSample = new MediaStreamSample(
        this.audioStreamDescription,
        this.mediaStream,
        sample.Offset,
        sample.Count,
        sample.Timestamp,
        emptyDict);

    audioSampleIndex++;
    return mediaStreamSample;
}
/// <summary>
/// Parses the next sample from the requested stream and then calls ReportGetSampleCompleted
/// to inform its parent MediaElement of the next sample.
/// </summary>
/// <param name="mediaStreamType">
/// Should always be Audio for this MediaStreamSource.
/// </param>
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    Dictionary<MediaSampleAttributeKeys, string> emptyDict = new Dictionary<MediaSampleAttributeKeys, string>();
    MediaStreamSample audioSample = null;

    if (this.currentFrame != null)
    {
        // Calculate our current position based on the stream's length
        //// double ratio = (double)this.currentFrameStartPosition / (double)this.audioStreamLength;
        //// TimeSpan currentPosition = new TimeSpan((long)(this.trackDuration.Ticks * ratio));

        // Calculate our current position instead based on the bitrate of the stream (more accurate?)
        double position = (double)this.currentFrameStartPosition / (double)this.currentFrame.Bitrate;
        TimeSpan currentPosition = TimeSpan.FromSeconds(position * 8 /* bits per Byte */);

        // Create a MemoryStream to hold the bytes.
        // FrameSize includes the frame header which we've already read from the previous iteration,
        // so just copy the header, and then read the remaining bytes.
        this.currentFrame.CopyHeader(buffer);
        int audioSampleSize = this.currentFrame.FrameSize - MpegFrame.FrameHeaderSize;
        int c = this.audioStream.Read(buffer, MpegFrame.FrameHeaderSize, audioSampleSize);
        if (c != audioSampleSize)
        {
            // Ran out of bytes trying to read MP3 frame.
            this.currentFrame = null;
            audioSample = new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, emptyDict);
            this.ReportGetSampleCompleted(audioSample);
            return;
        }

        this.currentFrameStartPosition += c;
        using (MemoryStream audioFrameStream = new MemoryStream(buffer))
        {
            // Return the next sample in the stream
            audioSample = new MediaStreamSample(this.audioStreamDescription, audioFrameStream, 0, this.currentFrame.FrameSize, currentPosition.Ticks, emptyDict);
            this.ReportGetSampleCompleted(audioSample);

            // Grab the next frame
            MpegFrame nextFrame = new MpegFrame(this.audioStream);
            if ((nextFrame.Version == 1 || nextFrame.Version == 2) && nextFrame.Layer == 3)
            {
                this.currentFrameStartPosition += MpegFrame.FrameHeaderSize;
                this.currentFrame = nextFrame;
            }
            else
            {
                this.currentFrame = null;
            }
        }
    }
    else
    {
        // We're near the end of the file, or we got an irrecoverable error.
        // Return a null stream which tells the MediaStreamSource & MediaElement to shut down.
        audioSample = new MediaStreamSample(this.audioStreamDescription, null, 0, 0, 0, emptyDict);
        this.ReportGetSampleCompleted(audioSample);
    }
}
/// <summary>
/// Return the next sample requested
/// </summary>
/// <param name="mediaStreamType">The stream type that we are getting a sample for</param>
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    // Start with one second of data, rounded up to the nearest block.
    uint bufferSize = (uint)AlignUp(
        this.wavParser.WaveFormatEx.AvgBytesPerSec,
        this.wavParser.WaveFormatEx.BlockAlign);

    // Figure out how much data we have left in the chunk compared to the
    // data that we need.
    bufferSize = Math.Min(bufferSize, this.wavParser.BytesRemainingInChunk);
    if (bufferSize > 0)
    {
        this.wavParser.ProcessDataFromChunk(bufferSize);

        // Send out the next sample
        MediaStreamSample sample = new MediaStreamSample(
            this.audioDesc,
            this.stream,
            this.currentPosition,
            bufferSize,
            this.currentTimeStamp,
            this.emptySampleDict);

        // Move our timestamp and position forward
        this.currentTimeStamp += this.wavParser.WaveFormatEx.AudioDurationFromBufferSize(bufferSize);
        this.currentPosition += bufferSize;

        // Loop forever
        if (Loop && this.wavParser.BytesRemainingInChunk == 0)
        {
            this.wavParser.MoveToStartOfChunk();
            this.currentPosition = this.startPosition;
        }

        ReportGetSampleCompleted(sample);
    }
    else if (sendFakeEnding)
    {
        // Required to work around a bug in Silverlight with short samples that would otherwise only send
        // ReportGetSampleCompleted once (actually twice due to a weird, non-debuggable thread!)
        // (Add Debug.WriteLine or Assert to see this happen)
        sendFakeEnding = false;
        MediaStreamSample sample = new MediaStreamSample(
            this.audioDesc,
            this.stream,
            this.currentPosition,
            bufferSize,
            this.currentTimeStamp,
            this.emptySampleDict);
        ReportGetSampleCompleted(sample);
    }
    else
    {
        // Report EOS
        ReportGetSampleCompleted(new MediaStreamSample(this.audioDesc, null, 0, 0, 0, this.emptySampleDict));
    }
}
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    _updater(_buffer, _bufferSize);

    var sample = new MediaStreamSample(
        _audioDescription,
        _bufferStream,
        0,
        _bufferSize,
        _timestamp,
        _emptySampleDict);

    _timestamp += _bufferSize * 10000000L / _waveFormat.AverageBytesPerSec; // 100-ns units

    ReportGetSampleCompleted(sample);
}
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    int blocksPlayed = this.Asap.GetBlocksPlayed();
    int bufferLen = this.Asap.Generate(buffer, buffer.Length, BitsPerSample == 8 ? ASAPSampleFormat.U8 : ASAPSampleFormat.S16LE);
    Stream s = bufferLen == 0 ? null : new MemoryStream(buffer);

    MediaStreamSample mss = new MediaStreamSample(
        this.MediaStreamDescription,
        s,
        0,
        bufferLen,
        blocksPlayed * 10000000L / ASAP.SampleRate,
        SampleAttributes);

    ReportGetSampleCompleted(mss);
}
public void EnqueueVideoSample(byte[] buf)
{
    // This applies back-pressure to the DU queue in common. It's needed so that
    // our queue doesn't grow too large.
    MediaStreamSample sample = CreateVideoSample(buf);

    // Wait until there's space to queue
    queueEmpty.WaitOne();

    lock (videoQueueLock)
    {
        Debug.Assert(pendingVideoSample == null);
        pendingVideoSample = sample;
    }
}
public MediaStreamSampleEvents(MediaStreamSample This)
{
    this.This = This;
}
protected void SampleOnProcessed(MediaStreamSample mss, object obj)
{
    _freeBuffers.Push(mss.Buffer);
#if DEBUG
    Interlocked.Increment(ref _free);
#endif
    mss.Processed -= SampleOnProcessed;
}
private void OnSampleProcessed(MediaStreamSample sender, object args)
{
    _buffersQueue.Enqueue(sender.Buffer);
    sender.Processed -= OnSampleProcessed;
}
public void VideoSampleRequested(MediaStreamSourceSampleRequestedEventArgs args)
{
    lock (videoQueueLock)
    {
        if (pendingVideoSample != null)
        {
            args.Request.Sample = pendingVideoSample;
            pendingVideoSample = null;
            queueEmpty.Set();
            return;
        }
    }

    // If we don't have any sample right now, we just return an empty sample. This tells the decoder
    // that we're still alive here. Doing a sample deferral seems to cause serious lag issues.
    args.Request.Sample = MediaStreamSample.CreateFromBuffer(new byte[0].AsBuffer(), TimeSpan.Zero);
}
protected override void GetSampleAsync(MediaStreamType mediaStreamType)
{
    MediaStreamSample sample;
    if (m_endCount > 1)
    {
        sample = new MediaStreamSample(description, null, 0, 0, 0, m_mediaSampleAttributes);
        ReportGetSampleCompleted(sample);
    }
    else
    {
        using (MemoryStream stream = new MemoryStream(BUFFER_BLOCK_SIZE))
        using (BinaryWriter writer = new BinaryWriter(stream))
        {
            int index = m_count * BUFFER_SIZE;
            double volume = Math.Min(MasterVolume, 100) / 100;
            for (int i = index; i < index + BUFFER_SIZE; i++)
            {
                int bufferIndex = i << 1;
                writer.Write((m_buffer[bufferIndex] * volume).ToShort());
                writer.Write((m_buffer[bufferIndex + 1] * volume).ToShort());
            }

            m_count++;
            m_timestampIndex += m_timestampBlock;

            sample = new MediaStreamSample(description, stream, 0, BUFFER_BLOCK_SIZE, m_timestampIndex, m_mediaSampleAttributes);
            ReportGetSampleCompleted(sample);
        }
    }

    if (m_count == m_multiple)
    {
        GetSamples();
        m_count = 0;
        if ((m_tracks[Track.TEMPO_TRACK]).End)
            m_endCount++;
    }
}