/// <summary>
/// Record the audio media type on the audio buffer registered at the given
/// stream index. No-op when no buffer exists at that index.
/// </summary>
/// <param name="mt">Media type to assign to the buffer.</param>
/// <param name="index">Stream index of the target audio buffer.</param>
internal void RegisterAudioMediaType(UW.CSE.MDShow.MediaType mt, int index)
{
    // Single Hashtable lookup instead of ContainsKey + indexer (which hashed the
    // key twice). The 'as' cast also avoids an InvalidCastException if a
    // non-AudioBuffer entry ever appears in the table; the hard cast used before
    // would have thrown.
    AudioBuffer buffer = audioBuffers[index] as AudioBuffer;
    if (buffer != null)
    {
        buffer.AudioMediaType = mt;
    }
}
private const int SIZE_IN_SECONDS = 10; // Buffer size in seconds, given the media type.
#endregion

#region Constructor
/// <summary>
/// Initialize all bookkeeping state to "empty / not started". No storage is
/// allocated here; Buffer stays null until the buffer is created from a media
/// type (presumably via a separate Create call — not visible in this chunk).
/// </summary>
/// <param name="index">Stream index this buffer serves.</param>
/// <param name="cname">Source identifier (CName) associated with the stream.</param>
public AudioBuffer(uint index, String cname)
{
    this.index = index;
    this.cname = cname;
    myMediaType = null;   // assigned later when the media type is registered
    SilenceSize = 0;
    Buffer = null;        // no storage until the buffer is created
    PresTime = null;
    QuietBuffer = null;
    AddAudio = 0;
    SkipAudio = 0;
    SampleSize = 0;
    SampleCount = 0;
    WriteOffset = 0;
    ReadOffset = 0;
    SamplesReady = 0;
    BufferSize = 0;
    BufferPos = 0;
    LeftoverBytes = 0;
    samplesWritten = 0;
    streamStopTime = 0;
    bytesBuffered = 0;
    started = false;
    sampleReceived = false;
    // Starts unsignaled; presumably set when an audio quantum becomes
    // available — confirm against the writer loop.
    AudioQuantumResetEvent = new ManualResetEvent(false);
}
/// <summary>
/// Allocate and configure one of the temp buffers. These are used in
/// preparation for switching sources while encoding: whichever temp buffer
/// (video or audio) carries the given index is (re)created with the new
/// media type. Does nothing if neither temp buffer matches.
/// </summary>
/// <param name="index">Stream index identifying which temp buffer to configure.</param>
/// <param name="mt">Media type for the new source.</param>
public void CreateTempBuffer(int index, UW.CSE.MDShow.MediaType mt)
{
    // Video is checked first, so it takes precedence if both indices match.
    if ((tmpVideoBuffer != null) && (tmpVideoBuffer.Index == index))
    {
        tmpVideoMediaType = mt;
        tmpVideoBuffer.Create(mt);
        return;
    }

    if ((tmpAudioBuffer != null) && (tmpAudioBuffer.Index == index))
    {
        tmpAudioMediaType = mt;
        tmpAudioBuffer.AudioMediaType = mt;
        tmpAudioBuffer.Create();
    }
}
/// <summary>
/// Initialize an empty MediaBuffer: no media types set, no audio buffers
/// registered, not started.
/// </summary>
/// <param name="maxAudioStreams">Maximum number of concurrent audio streams.</param>
public MediaBuffer(uint maxAudioStreams)
{
    eventLog = new EventLog("WMG", ".", "WMGCore");
    started = false;
    writing = false;
    stopNow = false;
    nextIndex = 0;
    audioBuffers = new Hashtable();  // maps stream index -> AudioBuffer
    audioMediaType = null;
    videoMediaType = null;
    TimeZero = DateTime.MinValue; //indicates unassigned.
    this.maxAudioStreams = maxAudioStreams;
    audioEndTime = 0;
    // Initially signaled: no video source switch is in progress.
    videoSwitchCompletedResetEvent = new ManualResetEvent(true);
}
/// <summary>
/// Query the videobuffer to find out if the last sample at or after markOutTime has been read.
/// If it has, swap the tmpVideoBuffer in place of videobuffer.
/// </summary>
private void CheckVideoSourceChange()
{
    // MarkOutTime == 0 means no source change is pending; nothing to do.
    if (videoBuffer.MarkOutTime == 0)
    {
        return;
    }
    Debug.WriteLine("CheckVideoSourceChange: mo=" + videoBuffer.MarkOutTime.ToString() +
        " lastRead=" + videoBuffer.LastReadTime.ToString() +
        " now=" + DateTime.Now.Ticks.ToString());
    //Here we wait for up to 5 seconds for the old video buffer to finish writing up through the mark out time.
    //If we have ultra-low frame rate video (such as screen streaming) the timeout does get used.
    //The impact of not using the timeout is that we have potential for buffer overruns in the new buffer.
    // NOTE(review): the timeout arm compares DateTime.Now.Ticks against MarkOutTime,
    // which assumes MarkOutTime is expressed in wall-clock DateTime ticks — confirm
    // against where MarkOutTime is assigned.
    if ((videoBuffer.LastReadTime >= videoBuffer.MarkOutTime) ||
        (DateTime.Now.Ticks > (long)(videoBuffer.MarkOutTime) + TimeSpan.FromSeconds(5).Ticks))
    {
        videoSourceChangeError = "";
        // NOTE(review): lock(this) is an anti-pattern (external code can take the same
        // lock); a private lock object would be safer, but adding one requires a new
        // field outside this method, so it is only flagged here.
        lock (this)
        {
            videoBuffer = tmpVideoBuffer;
            tmpVideoBuffer = null;
            // NOTE(review): the buffer swap above is committed even if ConfigVideo fails
            // below, leaving videoBuffer and videoMediaType potentially mismatched —
            // confirm this is the intended failure behavior.
            if ((wmWriter != null) && (wmWriter.ConfigVideo(tmpVideoMediaType)))
            {
                videoMediaType = tmpVideoMediaType; //success.
            }
            else
            {
                videoSourceChangeError = "Failed to reset video media type.";
                Debug.WriteLine("Failed to reset video media type.");
            }
        }
        // Unblock anyone waiting for the switch to complete (success or failure;
        // callers are expected to check videoSourceChangeError).
        videoSwitchCompletedResetEvent.Set();
    }
}
/// <summary>
/// Record the audio and video media types for later buffer creation.
/// Must be called before creating buffers.
/// </summary>
/// <param name="audioType">Media type to use for audio buffers.</param>
/// <param name="videoType">Media type to use for video buffers.</param>
public void SetMediaTypes(UW.CSE.MDShow.MediaType audioType, UW.CSE.MDShow.MediaType videoType)
{
    videoMediaType = videoType;
    audioMediaType = audioType;
}
/// <summary>
/// Allocates the storage used by the buffer. After this method call
/// the buffer is ready to be started.
/// </summary>
/// <param name="mt">Video media type; frame size and average frame duration are read from it.</param>
/// <returns>Always true; allocation failure surfaces as an exception.</returns>
/// <exception cref="OutOfMemoryException">
/// Thrown when even the minimum buffer (roughly one second of frames) cannot be allocated.
/// </exception>
public bool Create(UW.CSE.MDShow.MediaType mt)
{
    myMediaType = mt;
    UW.CSE.MDShow.MediaTypeVideoInfo vi = myMediaType.ToMediaTypeVideoInfo();
    FrameSize = vi.VideoInfo.BitmapInfo.SizeImage;
    FrameDuration = (uint)vi.VideoInfo.AvgTimePerFrame;
    Debug.Assert(FrameSize > 0, "VideoBuffer received bogus media type");

    //Come up with an integer number of frames per second. There will be some
    // round-off error which should be ignored, Then we'll ceil up to the next int.
    // In fact it turns out we can't really trust the VideoFrameDuration value provided
    // by the filter graph. We will just do this as a sanity check, but to get the
    // actual frame rate, we will need to count samples we receive.
    double numerator = 10000000;  // 100ns units per second (AvgTimePerFrame units)
    double denom = FrameDuration;
    double fps = numerator / denom;
    // BUG FIX: the cast must apply to the product, not to fps alone.
    // The original "(int)fps * 10000" truncated fps to a whole number *before*
    // scaling, discarding the fractional frame rate entirely (29.97 -> 29) and
    // defeating the round-off cleanup below. "(int)(fps * 10000)" keeps four
    // decimal places as intended (29.97 -> 299700 -> 29.97 -> ceil -> 30).
    int ifps = (int)(fps * 10000); // throw away an estimated roundoff error
    fps = ifps / 10000.0;
    ifps = (int)Math.Ceiling(fps); // if it's still not an integer, err on the high side.
    Debug.Assert(ifps <= MAX_FPS, "VideoBuffer assumes " + MAX_FPS.ToString() + "fps or less");
    Debug.WriteLine("VideoBuffer.Create calculated fps=" + ifps.ToString() + " framesize=" + FrameSize.ToString());
    // Only two buffer-sizing tiers are used: 30fps or the supported maximum.
    this.estimatedFps = (ifps <= 30) ? 30 : MAX_FPS;

    // Start assuming we'll use the maximum buffer size
    FrameCount = MAX_BUFFER / FrameSize;
    if (FrameCount > (this.estimatedFps * SIZE_IN_SECONDS))
    {
        // Scale it back so as not to overkill if framesize is small enough
        FrameCount = this.estimatedFps * SIZE_IN_SECONDS;
    }

    //If we can't get as much memory as we initially request, try scaling back up to a point.
    while (true)
    {
        try
        {
            Buffer = new BufferChunk((int)(FrameSize * FrameCount));
            break;
        }
        catch (OutOfMemoryException)
        {
            // Below roughly one second of frames, give up and let the exception propagate.
            if (FrameCount <= this.estimatedFps)
            {
                throw;
            }
            FrameCount = (uint)((double)FrameCount * 0.7);
            Debug.WriteLine("Warning: VideoBuffer failed to get requested memory. Scaling buffer down to " + FrameCount.ToString() + " frames.");
        }
    }
    Buffer.Length = (int)(FrameSize * FrameCount);
    PresTime = new ulong[FrameCount];
    WriteOffset = 0;
    ReadOffset = 0;
    FramesReady = 0;
    streamStopTime = 0;
    TotalFrames = 0;
    started = false;
    sampleReceived = false;
    GotVideoPeriod = false;
    bufferOverrun = false;
    return (true);
}