/// <summary>
/// Read 'duration' ticks from the start of the stream. Note: the payload parameter is only
/// significant for audio and video.
/// </summary>
public DBStreamPlayer(int streamID, long duration, PayloadType payload)
{
    this.streamID = streamID;
    currentIndex = 0;
    indexCount = 0;
    streamEndReached = false;

    //Get stream parameters
    int maxFrameSize;
    int maxFrameCount;
    int maxBufferSize;
    DatabaseUtility.GetStreamStatistics(streamID, out firstTick, out maxFrameSize, out maxFrameCount, out maxBufferSize);

    //Allocate buffer and indices array
    frame = new BufferChunk(maxBufferSize);
    indices = new Index[maxFrameCount];

    //Init start and end
    this.start = firstTick;
    this.end = start + duration;
    startingTick = start;
    endingTick = start - 1;

    //Set the stream MediaType
    MediaType mediaType = null;
    if (payload == PayloadType.dynamicVideo)
    {
        mediaType = ProfileUtility.StreamIdToVideoMediaType(streamID);
    }
    else if (payload == PayloadType.dynamicAudio)
    {
        mediaType = ProfileUtility.StreamIdToAudioMediaType(streamID);
    }
    //Keep the result; streamMediaType is assumed to be the backing field for the
    //StreamMediaType property used by StreamMgr.ValidateCompressedMT.
    this.streamMediaType = mediaType;
}
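//Illustrative sketch (not from the original source): constructing and draining a DBStreamPlayer,
//using the GetNextFrame signature shown in verifyStream below. The stream ID and duration are
//hypothetical values.
private static void DrainStream()
{
    DBStreamPlayer player = new DBStreamPlayer(
        42,                          //hypothetical stream ID
        10L * Constants.TicksPerSec, //read ten seconds' worth of ticks
        PayloadType.dynamicAudio);

    BufferChunk frame;
    long streamTime;
    while (player.GetNextFrame(out frame, out streamTime))
    {
        //streamTime is an absolute timestamp in 100ns ticks.
        Console.WriteLine(new DateTime(streamTime).ToString() + ": " + frame.Length + " bytes");
    }
}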
/// <summary>
/// Make sure all the streams for this StreamMgr have compatible compressed media types.
/// Also check against the media type optionally supplied as an argument.
/// Log details about any problems we find.
/// </summary>
/// <remarks>
/// In no-recompression scenarios, including the preview, we check this and warn the user,
/// though we can't do anything about it.
/// </remarks>
/// <param name="prevMT">An additional MediaType to check against, or null if none</param>
/// <param name="log">Where to write details about any problems found</param>
/// <returns>Always true</returns>
public bool ValidateCompressedMT(MediaType prevMT, LogMgr log)
{
    MediaType lastMT = prevMT;
    for (int i = 0; i < streamPlayers.Length; i++)
    {
        if (payload == PayloadType.dynamicVideo)
        {
            if (!ProfileUtility.CompareVideoMediaTypes((MediaTypeVideoInfo)streamPlayers[i].StreamMediaType,
                (MediaTypeVideoInfo)lastMT))
            {
                Debug.WriteLine("Incompatible video media type found.");
                log.WriteLine("Warning: A change in the media type was found in the video stream from " +
                    this.cname + " beginning at " + streamPlayers[i].Start.ToString() +
                    ". Without recompression, this may cause a problem with the output. " +
                    "Using recompression should resolve the issue.");
                log.ErrorLevel = 5;
            }
        }
        else if (payload == PayloadType.dynamicAudio)
        {
            if (!ProfileUtility.CompareAudioMediaTypes((MediaTypeWaveFormatEx)streamPlayers[i].StreamMediaType,
                (MediaTypeWaveFormatEx)lastMT))
            {
                Debug.WriteLine("Incompatible audio media type found.");
                log.WriteLine("Warning: A change in the media type was found in the audio stream from " +
                    this.cname + " beginning at " + streamPlayers[i].Start.ToString() +
                    ". Without recompression, this may cause a problem with the output. " +
                    "Using recompression should resolve the issue.");
                log.ErrorLevel = 5;
            }
        }
        lastMT = streamPlayers[i].StreamMediaType;
    }
    return true;
}
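//Illustrative sketch (not from the original source): how a caller might run the check over a
//collection of StreamMgr instances. The streamMgrs and log parameters are hypothetical; passing
//null as prevMT skips the cross-segment comparison for the first manager, matching the
//"or null if none" contract above.
private static void ValidateAll(StreamMgr[] streamMgrs, LogMgr log)
{
    foreach (StreamMgr mgr in streamMgrs)
    {
        //Always returns true; any incompatibilities are reported through the log.
        mgr.ValidateCompressedMT(null, log);
    }
}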
private static void verifyStream(DBStreamPlayer streamPlayer)
{
    BufferChunk frame;
    long streamTime;
    BufferChunk sample;
    bool keyframe;
    long refTime = 0;
    long lastStreamTime = 0;
    long timediff = 0;
    DateTime dt;
    while (streamPlayer.GetNextFrame(out frame, out streamTime))
    {
        try
        {
            sample = ProfileUtility.FrameToSample(frame, out keyframe);
        }
        catch (Exception ex)
        {
            dt = new DateTime(streamTime);
            Console.WriteLine("FrameToSample failed at: " + dt.ToString() + "; Sampletime=" + streamTime.ToString());
            Console.WriteLine(ex.ToString());
            continue;
        }

        if (refTime == 0)
        {
            refTime = streamTime;
        }
        timediff = streamTime - lastStreamTime;

        //Diagnostics: uncomment to look for large intervals between samples.
        //if (timediff > 500000L) {
        //    dt = new DateTime(streamTime);
        //    Console.WriteLine("Sample: " + dt.ToString() + "; Sampletime=" + streamTime.ToString() +
        //        "; length=" + sample.Length.ToString() + "; interval=" + timediff.ToString());
        //}

        //Diagnostics: uncomment to look for large samples.
        //if (sample.Length > 90000) {
        //    dt = new DateTime(streamTime);
        //    Console.WriteLine("Sample: " + dt.ToString() + "; Sampletime=" + streamTime.ToString() +
        //        "; length=" + sample.Length.ToString() + "; interval=" + timediff.ToString());
        //}

        //Diagnostics: uncomment to look for small samples.
        //if (sample.Length < 300) {
        //    dt = new DateTime(streamTime);
        //    Console.WriteLine("Sample: " + dt.ToString() + "; Sampletime=" + streamTime.ToString() +
        //        "; length=" + sample.Length.ToString() + "; interval=" + timediff.ToString());
        //}

        lastStreamTime = streamTime;
    }
    DateTime dt1 = new DateTime(refTime);
    DateTime dt2 = new DateTime(lastStreamTime);
    Console.WriteLine("Started at " + dt1.ToString() + "; Ended at " + dt2.ToString() +
        " (" + lastStreamTime.ToString() + " ticks)" +
        "; duration in ticks = " + (lastStreamTime - refTime).ToString());
}
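//Illustrative note (not from the original source): the commented thresholds above are in
//100-nanosecond ticks, the same unit DateTime uses. A minimal sketch of the conversions,
//using TimeSpan to keep the units straight:
private static void ShowThresholdUnits()
{
    long intervalThreshold = 500000L; //ticks
    Console.WriteLine(TimeSpan.FromTicks(intervalThreshold).TotalMilliseconds); //prints 50
    long fiftyMsInTicks = TimeSpan.FromMilliseconds(50).Ticks;                  //500000
    Console.WriteLine(fiftyMsInTicks);
}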
/// <summary>
/// Return the next sample, compressed or uncompressed as appropriate.
/// </summary>
/// <param name="sample">The bits</param>
/// <param name="timestamp">Absolute timestamp in ticks</param>
/// <param name="keyframe">True if this sample is compressed and is a video keyframe</param>
/// <param name="newstream">True if this sample is uncompressed and
/// is from a different stream than the previous sample (thus may have a different media type)</param>
/// <returns>True if a valid sample is returned</returns>
public bool GetNextSample(out BufferChunk sample, out long timestamp, out bool keyframe, out bool newstream)
{
    BufferChunk frame;
    keyframe = false;
    newstream = false;
    if (compressed)
    {
        for (int i = 0; i < streamPlayers.Length; i++)
        {
            if (streamPlayers[i].GetNextFrame(out frame, out timestamp))
            {
                sample = ProfileUtility.FrameToSample(frame, out keyframe);
                return true;
            }
        }
    }
    else
    {
        for (int i = 0; i < fileStreamPlayers.Length; i++)
        {
            while (fileStreamPlayers[i].GetNextSample(out sample, out timestamp))
            {
                //Skip past frames that may be returned due to the look-behind.
                if (timestamp >= startTime)
                {
                    currentFSPGuid = fileStreamPlayers[i].xGuid;
                    if (fileStreamPlayers[i].AudioMediaType != null)
                    {
                        currentChannels = fileStreamPlayers[i].AudioMediaType.WaveFormatEx.Channels;
                    }
                    //PRI3: instead of newstream, we could track the MT and raise a flag only when it changes.
                    if (i != lastFSP)
                    {
                        newstream = true;
                    }
                    lastFSP = i;
                    return true;
                }
            }
        }
    }
    sample = null;
    timestamp = 0;
    return false;
}
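//Illustrative sketch (not from the original source): a consumer loop for GetNextSample. The
//streamMgr parameter and HandleSample method are hypothetical. When newstream is true in the
//uncompressed case, the media type may have changed, so a consumer would re-query it before
//handling the sample.
private static void ConsumeSamples(StreamMgr streamMgr)
{
    BufferChunk sample;
    long timestamp;
    bool keyframe, newstream;
    while (streamMgr.GetNextSample(out sample, out timestamp, out keyframe, out newstream))
    {
        if (newstream)
        {
            //Re-read the (possibly changed) uncompressed media type here.
        }
        HandleSample(sample, timestamp, keyframe);
    }
}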
/// <summary>
/// Recompress audio from the mixer into a temp file using the native profile. This is used to
/// implement mixing in the 'norecompression' scenario.
/// </summary>
/// <param name="progressTracker">Receives progress updates in seconds of audio written</param>
/// <returns>False if there is no audio, or if the writer could not be configured</returns>
public bool Recompress(ProgressTracker progressTracker)
{
    cancel = false;

    if (audioMgr.Length == 0)
    {
        return false;
    }

    ProfileData profileData = null;
    if (this.compatibleStreamID >= 0)
    {
        profileData = ProfileUtility.StreamIdToProfileData(compatibleStreamID, MSR.LST.Net.Rtp.PayloadType.dynamicAudio);
        //Debug.WriteLine("Mixer.Recompress: using audio profile from streamID: " + compatibleStreamID.ToString());
    }
    else
    {
        //Under what circumstances could we get here??
        profileData = audioMgr[0].StreamProfileData;
    }

    WMWriter wmWriter = new WMWriter();
    wmWriter.Init();
    if (!wmWriter.ConfigProfile(profileData))
    {
        return false;
    }
    String tempFileName = Utility.GetTempFilePath("wma");
    wmWriter.ConfigFile(tempFileName);
    wmWriter.GetInputProps();
    wmWriter.ConfigAudio(audioMgr[0].GetUncompressedAudioMediaType());
    wmWriter.Start();

    //Write samples
    progressTracker.CurrentValue = 0;
    BufferChunk audioSample = null;
    long audioTime = long.MaxValue;
    long refTime = 0, endTime = 0;
    long lastWriteTime = 0;
    while (!cancel)
    {
        if (audioSample == null)
        {
            endTime = audioTime;
            if (!GetNextSample(out audioSample, out audioTime))
            {
                break;
            }
        }

        if (audioSample != null)
        {
            //Write audio
            if (refTime == 0)
            {
                refTime = audioTime;
            }
            //Debug.WriteLine("Mixer.Recompress write audio: " + (audioTime - refTime).ToString() + "; length=" + audioSample.Length.ToString());
            lastWriteTime = audioTime - refTime;
            wmWriter.WriteAudio((uint)audioSample.Length, audioSample, (ulong)(audioTime - refTime));
            audioSample = null;
        }
        else
        {
            break;
        }
        progressTracker.CurrentValue = (int)(lastWriteTime / Constants.TicksPerSec);
    }

    wmWriter.Stop();
    wmWriter.Cleanup();
    wmWriter = null;

    //Prepare a FileStreamPlayer to read back compressed samples.
    fileStreamPlayer = new FileStreamPlayer(tempFileName, refTime, endTime, true, -1);
    return true;
}
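//Illustrative sketch (not from the original source): reading the mixed audio back after a
//successful Recompress, using the two-argument GetNextSample overload seen in
//StreamMgr.GetNextSample above. The mixer and tracker parameters are hypothetical, and
//FileStreamPlayer is assumed to be a property wrapping the fileStreamPlayer field set above.
private static void ReadBackMixedAudio(Mixer mixer, ProgressTracker tracker)
{
    if (mixer.Recompress(tracker))
    {
        BufferChunk sample;
        long timestamp;
        while (mixer.FileStreamPlayer.GetNextSample(out sample, out timestamp))
        {
            //Feed the compressed WMA samples to the final writer here.
        }
    }
}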
/// <summary>
/// Write each stream from DBStreamPlayer to a WM file, then create a FileStreamPlayer for each.
/// It is necessary to do this before reading uncompressed samples, or before using any of the
/// methods that return uncompressed MediaTypes.
/// This can be a long-running process. It can be cancelled with the Stop method.
/// </summary>
/// <param name="progressTracker">Receives progress updates in seconds written</param>
/// <returns>False if we failed to configure the native profile</returns>
public bool ToRawWMFile(ProgressTracker progressTracker)
{
    if (cancel)
    {
        return true;
    }

    String tmpfile = "";
    fileStreamPlayers = new FileStreamPlayer[streamPlayers.Length];
    for (int i = 0; i < streams.Length; i++)
    {
        streamProfileData = ProfileUtility.StreamIdToProfileData(streams[i], payload);
        if (payload == PayloadType.dynamicVideo)
        {
            tmpfile = Utility.GetTempFilePath("wmv");
            //nativeProfile = ProfileUtility.MakeNativeVideoProfile(streams[i]);
        }
        else
        {
            tmpfile = Utility.GetTempFilePath("wma");
            //nativeProfile = ProfileUtility.MakeNativeAudioProfile(streams[i]);
        }
        WMWriter wmWriter = new WMWriter();
        wmWriter.Init();
        //if (!wmWriter.ConfigProfile(nativeProfile, "", 0))
        if (!wmWriter.ConfigProfile(StreamProfileData))
        {
            return false;
        }
        wmWriter.ConfigFile(tmpfile);
        wmWriter.ConfigNullProps();
        //wmWriter.SetCodecInfo(payload);
        wmWriter.Start();

        long streamTime = long.MaxValue;
        long refTime = 0;
        long endTime = 0;
        long lastWriteTime = 0;
        BufferChunk frame;
        BufferChunk sample;
        bool keyframe;
        bool discontinuity = true;

        //Catch exceptions to work around the rare case of data corruption.
        //Oddly, in one case where this occurred, it did not occur if the segments were short enough.
        while (streamPlayers[i].GetNextFrame(out frame, out streamTime))
        {
            try
            {
                sample = ProfileUtility.FrameToSample(frame, out keyframe);
            }
            catch
            {
                DateTime dt = new DateTime(streamTime);
                Console.WriteLine("Possible data corruption in stream: " + this.payload + ";" + this.cname +
                    " at " + dt.ToString() + " (" + streamTime.ToString() + ")");
                continue;
            }

            if (refTime == 0)
            {
                refTime = streamTime;
            }
            lastWriteTime = streamTime - refTime;

            try
            {
                if (payload == PayloadType.dynamicVideo)
                {
                    //Debug.WriteLine("Write video: " + (streamTime - refTime).ToString() + "; length=" + sample.Length.ToString());
                    wmWriter.WriteCompressedVideo((ulong)(streamTime - refTime), (uint)sample.Length, (byte[])sample, keyframe, discontinuity);
                }
                else
                {
                    //Debug.WriteLine("Write audio: " + (streamTime - refTime).ToString() + "; length=" + sample.Length.ToString());
                    wmWriter.WriteCompressedAudio((ulong)(streamTime - refTime), (uint)sample.Length, (byte[])sample);
                }
            }
            catch
            {
                DateTime dt = new DateTime(streamTime);
                Console.WriteLine("Failed to write. Possible data corruption in stream: " + this.payload + ";" + this.cname +
                    " at " + dt.ToString() + " (" + streamTime.ToString() + ")");
            }

            if (discontinuity)
            {
                discontinuity = false;
            }
            endTime = streamTime;

            if (cancel)
            {
                break;
            }
            progressTracker.CurrentValue = (int)(lastWriteTime / Constants.TicksPerSec);
            //Debug.WriteLine("StreamMgr.ToRawWMFile: ProgressTracker currentValue=" + progressTracker.CurrentValue.ToString() +
            //    "; streamTime=" + streamTime.ToString());
        }

        wmWriter.Stop();
        wmWriter.Cleanup();
        wmWriter = null;
        fileStreamPlayers[i] = new FileStreamPlayer(tmpfile, refTime, endTime, false, streams[i]);
        if (cancel)
        {
            break;
        }
    }
    return true;
}
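//Illustrative sketch (not from the original source): the intended call order for the
//uncompressed path. ToRawWMFile must complete before uncompressed samples or media types are
//requested. The streamMgr and tracker parameters are hypothetical; Stop is the cancellation
//method named in the summary above.
private static void TranscodeToRaw(StreamMgr streamMgr, ProgressTracker tracker)
{
    if (streamMgr.ToRawWMFile(tracker))
    {
        BufferChunk sample;
        long timestamp;
        bool keyframe, newstream;
        while (streamMgr.GetNextSample(out sample, out timestamp, out keyframe, out newstream))
        {
            //Uncompressed samples are now available for mixing or recompression.
        }
    }
}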
public FileStreamPlayer(String filename, long start, long end, bool compressed, int streamID)
{
    this.streamID = streamID;
    this.filename = filename;
    this.start = start;
    this.end = end;
    this.duration = (ulong)(end - start);
    outOfData = false;
    this.guid = Guid.NewGuid();

    //Create IWMSyncReader and open the file.
    uint hr = WMFSDKFunctions.WMCreateSyncReader(null, 0, out reader);
    IntPtr fn = Marshal.StringToCoTaskMemUni(filename);
    reader.Open(fn);
    Marshal.FreeCoTaskMem(fn);

    //Verify that the file contains one stream.
    uint outputcnt;
    reader.GetOutputCount(out outputcnt);
    Debug.Assert(outputcnt == 1);

    //Extract the MediaType for the stream: query the size, then fetch into a native buffer.
    uint cmt = 0;
    IntPtr ipmt;
    IWMOutputMediaProps outputProps;
    reader.GetOutputProps(0, out outputProps);
    outputProps.GetMediaType(IntPtr.Zero, ref cmt);
    ipmt = Marshal.AllocCoTaskMem((int)cmt);
    outputProps.GetMediaType(ipmt, ref cmt);
    byte[] bmt = new byte[cmt];
    Marshal.Copy(ipmt, bmt, 0, (int)cmt);
    Marshal.FreeCoTaskMem(ipmt); //Release the native buffer once it has been copied.
    BufferChunk bc = new BufferChunk(bmt);
    byte[] cd;

    GUID majorTypeGUID;
    outputProps.GetType(out majorTypeGUID);
    if (WMGuids.ToGuid(majorTypeGUID) == WMGuids.WMMEDIATYPE_Video)
    {
        vmt = new MediaTypeVideoInfo();
        ProfileUtility.ReconstituteBaseMediaType((MediaType)vmt, bc);
        ProfileUtility.ReconstituteVideoFormat(vmt, bc, out cd);
        //Note: This is a special case which we would like to generalize: the default output
        //format for 12bpp video was found not to return any uncompressed samples. Setting this
        //particular case to RGB 24 fixed it.
        if ((!compressed) && (vmt.VideoInfo.BitmapInfo.BitCount == 12))
        {
            SetVideoOutputProps();
        }
    }
    else if (WMGuids.ToGuid(majorTypeGUID) == WMGuids.WMMEDIATYPE_Audio)
    {
        amt = new MediaTypeWaveFormatEx();
        ProfileUtility.ReconstituteBaseMediaType((MediaType)amt, bc);
        ProfileUtility.ReconstituteAudioFormat(amt, bc, out cd);
    }

    //If compressed is set, configure the reader to deliver compressed stream samples.
    if (compressed)
    {
        reader.SetReadStreamSamples(1, 1); //stream number 1; nonzero second argument means compressed
    }
}
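//Illustrative note (not from the original source): GetMediaType above follows the common COM
//"call twice" convention. A minimal sketch of just that pattern, assuming the same interop
//signatures used in the constructor, with try/finally to guarantee the native buffer is freed:
private static byte[] ReadMediaTypeBlob(IWMOutputMediaProps outputProps)
{
    uint size = 0;
    outputProps.GetMediaType(IntPtr.Zero, ref size); //first call: query the required size
    IntPtr buf = Marshal.AllocCoTaskMem((int)size);
    try
    {
        outputProps.GetMediaType(buf, ref size);     //second call: fill the buffer
        byte[] raw = new byte[size];
        Marshal.Copy(buf, raw, 0, (int)size);
        return raw;
    }
    finally
    {
        Marshal.FreeCoTaskMem(buf);
    }
}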