// Extension form of MFError.ThrowExceptionForHR: raises the matching
// exception when hr represents a failure code, otherwise does nothing.
public static void ThrowExceptionOnError(this HResult hr)
{
    MFError.ThrowExceptionForHR(hr);
}
// Sets the intended aspect ratio hint (MF_MT_SOURCE_CONTENT_HINT) on the
// media type; throws on failure.
public void SetSourceContentHint(MFVideoSrcContentHintFlags nFlags)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().SetUINT32(MFAttributesClsid.MF_MT_SOURCE_CONTENT_HINT, (int)nFlags));
}
// Sets how chroma was sampled for a Y'Cb'Cr' video media type
// (MF_MT_VIDEO_CHROMA_SITING); throws on failure.
public void SetChromaSiting(MFVideoChromaSubsampling nSampling)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().SetUINT32(MFAttributesClsid.MF_MT_VIDEO_CHROMA_SITING, (int)nSampling));
}
// Retrieves the approximate data rate of the video stream
// (MF_MT_AVG_BITRATE); throws on failure.
public void GetAverageBitRate(out int pRate)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().GetUINT32(MFAttributesClsid.MF_MT_AVG_BITRATE, out pRate));
}
// Sets the maximum number of frames from one key frame to the next
// (MF_MT_MAX_KEYFRAME_SPACING); throws on failure.
public void SetMaxKeyframeSpacing(int nSpacing)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().SetUINT32(MFAttributesClsid.MF_MT_MAX_KEYFRAME_SPACING, nSpacing));
}
// Retrieves the original media type that was wrapped by the
// MFWrapMediaType function; throws on failure.
public void Unwrap(out IMFMediaType ppOriginal)
{
    MFError.ThrowExceptionForHR(
        MFExtern.MFUnwrapMediaType(GetMediaType(), out ppOriginal));
}
// Sets the default stride (MF_MT_DEFAULT_STRIDE). Only appropriate for
// uncompressed data formats; throws on failure.
public void SetDefaultStride(int nStride)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().SetUINT32(MFAttributesClsid.MF_MT_DEFAULT_STRIDE, nStride));
}
// Local shorthand: raise the exception matching hr if it is a failure code.
private void Throw(HResult hr)
{
    MFError.ThrowExceptionForHR(hr);
}
// Default constructor: allocates the underlying IMFAttributes store with an
// initial capacity of DEFAULTATTRIBUTECOUNT entries.
// NOTE(review): assigning the returned HResult to an MFError local appears to
// rely on an implicit conversion on MFError that throws on a failure code
// (this file's "throw-on-assign" idiom) — confirm against the MFError type.
protected MFAttributes() { MFError hrthrowonerror = MFExtern.MFCreateAttributes( out m_Attribs, DEFAULTATTRIBUTECOUNT); }
// Stores an MFVideoArea value under the attribute key g on the given
// attribute store, packing it into a PropVariant first.
// NOTE(review): the HResult-to-MFError assignment presumably throws on
// failure (the throw-on-assign idiom used elsewhere in this file) — confirm.
// The PropVariant is never explicitly disposed; verify whether SetItem
// copies the value and whether disposal is required here.
private static void SetArea(IMFAttributes ia, Guid g, MFVideoArea a) { PropVariant pv = new PropVariant(a); MFError throwonhr = ia.SetItem(g, pv); }
//-------------------------------------------------------------------
// DrawFrame
//
// Draw the video frame: copy/convert the captured buffer onto a
// Direct3D swap-chain surface, then blit it to the device back buffer
// and present it.
//
// Returns S_OK when there is nothing to do (no device/swap chain), the
// failure code from TestCooperativeLevel/LockBuffer, or the result of
// the final StretchRectangle/Present.
//-------------------------------------------------------------------
public HResult DrawFrame(IMFMediaBuffer pCaptureDeviceBuffer)
{
    // No conversion function selected for the current format — refuse.
    if (m_convertFn == null)
    {
        return(HResult.MF_E_INVALIDREQUEST);
    }

    HResult hr = HResult.S_OK;
    IntPtr pbScanline0;
    int lStride = 0;
    Result res;

    Surface pSurf = null;
    Surface pBB = null;

    // Nothing to draw on yet; silently succeed.
    if (m_pDevice == null || m_pSwapChain == null)
    {
        return(HResult.S_OK);
    }

    // Helper object to lock the video buffer.
    using (VideoBufferLock xbuffer = new VideoBufferLock(pCaptureDeviceBuffer))
    {
        hr = TestCooperativeLevel();
        if (Failed(hr))
        {
            // goto jumps out of the using block, so xbuffer is still
            // disposed (the buffer is unlocked) before cleanup runs.
            goto done;
        }

        // Lock the video buffer. This method returns a pointer to the first scan
        // line in the image, and the stride in bytes.
        hr = xbuffer.LockBuffer(m_lDefaultStride, m_height, out pbScanline0, out lStride);
        if (Failed(hr))
        {
            goto done;
        }

        // Get the swap-chain surface.
        pSurf = m_pSwapChain.GetBackBuffer(0);

        // Lock the swap-chain surface and get Graphic stream object.
        DataRectangle dr = pSurf.LockRectangle(LockFlags.NoSystemLock);

        try
        {
            using (dr.Data)
            {
                // Convert the frame. This also copies it to the Direct3D surface.
                m_convertFn(dr.Data.DataPointer, dr.Pitch, pbScanline0, lStride, m_width, m_height);
            }
        }
        finally
        {
            // Always unlock the surface, even if the conversion threw.
            res = pSurf.UnlockRectangle();
            MFError.ThrowExceptionForHR(res.Code);
        }
    }

    // Color fill the back buffer (opaque dark blue: R=0, G=0, B=0x80).
    pBB = m_pDevice.GetBackBuffer(0, 0);
    m_pDevice.ColorFill(pBB, Color.FromArgb(0, 0, 0x80));

    // Blit the frame, scaling from the source rectangle to m_rcDest.
    Rectangle r = new Rectangle(0, 0, m_width, m_height);
    res = m_pDevice.StretchRectangle(pSurf, r, pBB, m_rcDest, TextureFilter.Linear);
    hr = (HResult)res.Code;

    if (res.IsSuccess)
    {
        // Present the frame.
        res = m_pDevice.Present();
        hr = (HResult)res.Code;
    }

done:
    SafeRelease(pBB);
    SafeRelease(pSurf);
    return(hr);
}
// Inspects the attribute at the given index on an IMFAttributes store,
// filling in the fields of capFormat for the attributes this code
// understands (frame rate, frame size, subtype) and tracing others.
// Returns a human-readable fragment for frame rate / frame size, or ""
// for everything else.
private string LogAttributeValueByIndex(IMFAttributes pAttr, int index, ref CaptureFormat capFormat)
{
    //string pGuidName = null;
    //string pGuidValName = null;
    Guid guid = Guid.Empty;

    PropVariant var = new PropVariant();
    //PropVariantInit(&var);

    // NOTE(review): hr is an int here while the rest of the file uses
    // HResult — presumably ThrowExceptionForHR has a matching overload;
    // confirm.  Also: var (PropVariant) is never explicitly disposed.
    int hr = pAttr.GetItemByIndex(index, out guid, var);
    MFError.ThrowExceptionForHR(hr);

    if (guid == MFAttributesClsid.MF_MT_FRAME_RATE)
    {
        //Trace.WriteLine("MF_MT_FRAME_RATE");
        // The value is a packed 64-bit integer; lower takes the low 32
        // bits, upperbits the high 32 bits.
        int lower = (int)var.GetULong();
        //Trace.WriteLine("LogAttr1_1 = " + lower.ToString());
        int upperbits = (int)(var.GetULong() >> 32);
        //Trace.WriteLine("LogAttr1_2 = " + upperbits.ToString());
        // Treats the high dword as the numerator and the low dword as the
        // denominator of the frame rate — TODO confirm against the
        // MF_MT_FRAME_RATE attribute documentation.
        float fr = (float)upperbits / (float)lower;
        capFormat.Framerate = fr;
        Trace.WriteLine("FrameRate = " + fr.ToString());
        return("Framerate=" + fr.ToString() + ", ");
    }
    else if (guid == MFAttributesClsid.MF_MT_FRAME_RATE_RANGE_MAX)
    {
        //Trace.WriteLine("MF_MT_FRAME_RATE_RANGE_MAX");
        // Unpacked but currently unused — the return below is commented out.
        int lower = (int)var.GetULong();
        //Trace.WriteLine("LogAttr1_1 = " + lower.ToString());
        int upperbits = (int)(var.GetULong() >> 32);
        //Trace.WriteLine("LogAttr1_2 = " + upperbits.ToString());
        //return "MaxFramerate=" + fr.ToString() + ", ";
    }
    else if (guid == MFAttributesClsid.MF_MT_FRAME_RATE_RANGE_MIN)
    {
        //Trace.WriteLine("MF_MT_FRAME_RATE_RANGE_MIN");
        // Unpacked but currently unused — the return below is commented out.
        int lower = (int)var.GetULong();
        //Trace.WriteLine("LogAttr1_1 = " + lower.ToString());
        int upperbits = (int)(var.GetULong() >> 32);
        float fr = (float)upperbits / (float)lower;
        //return "MinFramerate=" + fr.ToString() + ", ";
        //Trace.WriteLine("LogAttr1_2 = " + upperbits.ToString());
    }
    else if (guid == MFAttributesClsid.MF_MT_FRAME_SIZE)
    {
        //Trace.WriteLine("MF_MT_FRAME_SIZE");
        // Packed 64-bit value: here the low dword is taken as the height
        // and the high dword as the width — TODO confirm against the
        // MF_MT_FRAME_SIZE attribute documentation (and note the trace
        // below prints them height-first).
        int lower = (int)var.GetULong();
        // Trace.WriteLine("LogAttr1_1 = " + lower.ToString());
        int upperbits = (int)(var.GetULong() >> 32);
        //Trace.WriteLine("LogAttr1_2 = " + upperbits.ToString());
        capFormat.Height = lower;
        capFormat.Width = upperbits;
        Trace.WriteLine("Resolution=" + lower.ToString() + "X" + upperbits.ToString());
        return("Resolution=" + lower.ToString()
               + "X" + upperbits.ToString());
    }
    else if (guid == MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO)
    {
        //Trace.WriteLine("MF_MT_PIXEL_ASPECT_RATIO");
        // Unpacked but currently unused.
        int lower = (int)var.GetULong();
        //Trace.WriteLine("LogAttr1_1 = " + lower.ToString());
        int upperbits = (int)(var.GetULong() >> 32);
        //Trace.WriteLine("LogAttr1_2 = " + upperbits.ToString());
    }
    else if (guid == MFAttributesClsid.MF_MT_GEOMETRIC_APERTURE)
    {
        //Trace.WriteLine("MF_MT_GEOMETRIC_APERTURE");
        //Trace.WriteLine("LogAttr2 = " + var.ToString());
    }
    else if (guid == MFAttributesClsid.MF_MT_MINIMUM_DISPLAY_APERTURE)
    {
        //Trace.WriteLine("MF_MT_MINIMUM_DISPLAY_APERTURE");
        //Trace.WriteLine("LogAttr2 = " + var.ToString());
    }
    else if (guid == MFAttributesClsid.MF_MT_PAN_SCAN_APERTURE)
    {
        //Trace.WriteLine("MF_MT_PAN_SCAN_APERTURE");
        //Trace.WriteLine("LogAttr2 = " + var.ToString());
    }
    else if (guid == MFAttributesClsid.MF_MT_SUBTYPE)
    {
        // Record the pixel format (subtype GUID) for the caller.
        capFormat.PixelFormat = var.GetGuid();
    }
    else
    {
        Trace.WriteLine("Unknown attr " + guid.ToString() + " val = " + var.ToString());
    }

    return("");
}
// Creates an output (renderer) topology node for the given stream
// descriptor.  Audio streams get the default audio renderer activate;
// video streams get a video renderer activate bound to m_hwndVideo.
// Any other major type is rejected with E_FAIL.  On success, ppNode
// receives the new node (caller takes ownership); on any failure the
// node is released and the exception is rethrown.
protected void CreateOutputNode(
    IMFStreamDescriptor pSourceSD,
    out IMFTopologyNode ppNode
    )
{
    IMFTopologyNode pNode = null;
    IMFMediaTypeHandler pHandler = null;
    IMFActivate pRendererActivate = null;

    Guid guidMajorType = Guid.Empty;
    // NOTE(review): assigning an HResult to this MFError local appears to
    // throw on failure via implicit conversion (the file's throw-on-assign
    // idiom) — confirm against the MFError type.
    MFError throwonhr;

    // Get the stream ID.
    int streamID = 0;

    try
    {
        HResult hr;

        hr = pSourceSD.GetStreamIdentifier(out streamID);
        // Just for debugging, ignore any failures.
        if (MFError.Failed(hr))
        {
            TRACE("IMFStreamDescriptor::GetStreamIdentifier" + hr.ToString());
        }

        // Get the media type handler for the stream.
        throwonhr = pSourceSD.GetMediaTypeHandler(out pHandler);

        // Get the major media type.
        throwonhr = pHandler.GetMajorType(out guidMajorType);

        // Create a downstream node.
        throwonhr = MFExtern.MFCreateTopologyNode(MFTopologyType.OutputNode, out pNode);

        // Create an IMFActivate object for the renderer, based on the media type.
        if (MFMediaType.Audio == guidMajorType)
        {
            // Create the audio renderer.
            TRACE(string.Format("Stream {0}: audio stream", streamID));
            throwonhr = MFExtern.MFCreateAudioRendererActivate(out pRendererActivate);
        }
        else if (MFMediaType.Video == guidMajorType)
        {
            // Create the video renderer.
            TRACE(string.Format("Stream {0}: video stream", streamID));
            throwonhr = MFExtern.MFCreateVideoRendererActivate(m_hwndVideo, out pRendererActivate);
        }
        else
        {
            TRACE(string.Format("Stream {0}: Unknown format", streamID));
            throw new COMException("Unknown format", (int)HResult.E_FAIL);
        }

        // Set the IActivate object on the output node.
        throwonhr = pNode.SetObject(pRendererActivate);

        // Return the IMFTopologyNode pointer to the caller.
        ppNode = pNode;
    }
    catch
    {
        // If we failed, release the pNode
        SafeRelease(pNode);
        throw;
    }
    finally
    {
        // Clean up intermediates in both the success and failure paths.
        SafeRelease(pHandler);
        SafeRelease(pRendererActivate);
    }
}
// Decides whether the given sample should be presented now, based on its
// time stamp versus the presentation clock.  Returns true when the sample
// was handed to m_pCB.PresentSample; returns false (don't present yet)
// with plNextSleep set to the number of milliseconds to sleep before
// trying again.  With no clock, samples are always presented immediately.
public bool ProcessSample(IMFSample pSample, out int plNextSleep)
{
    HResult hr;
    long hnsPresentationTime = 0;
    long hnsTimeNow = 0;
    long hnsSystemTime = 0;

    bool bPresentNow = true;
    plNextSleep = 0;

    if (m_pClock != null)
    {
        // Get the sample's time stamp. It is valid for a sample to
        // have no time stamp.
        try
        {
            hr = pSample.GetSampleTime(out hnsPresentationTime);
            MFError.ThrowExceptionForHR(hr);

            // Get the clock time. (But if the sample does not have a time stamp,
            // we don't need the clock time.)
            hr = m_pClock.GetCorrelatedTime(0, out hnsTimeNow, out hnsSystemTime);
            MFError.ThrowExceptionForHR(hr);
        }
        catch
        {
            // Deliberate best-effort: if the sample has no time stamp (or
            // the clock query fails) the zero defaults above fall through
            // to "present now".
        }

        // Calculate the time until the sample's presentation time.
        // A negative value means the sample is late.
        long hnsDelta = hnsPresentationTime - hnsTimeNow;
        if (m_fRate < 0)
        {
            // For reverse playback, the clock runs backward. Therefore the delta is reversed.
            hnsDelta = -hnsDelta;
        }

        if (hnsDelta < -m_PerFrame_1_4th)
        {
            // This sample is late (more than 1/4 frame behind): present anyway.
            bPresentNow = true;
        }
        else if (hnsDelta > (3 * m_PerFrame_1_4th))
        {
            // This sample is still too early (more than 3/4 frame ahead). Go to sleep.
            plNextSleep = Utils.MFTimeToMsec(hnsDelta - (3 * m_PerFrame_1_4th));

            // Adjust the sleep time for the clock rate. (The presentation clock runs
            // at m_fRate, but sleeping uses the system clock.)
            plNextSleep = (int)(plNextSleep / Math.Abs(m_fRate));

            // Don't present yet.
            bPresentNow = false;
        }
    }

    if (bPresentNow)
    {
        // pSample released by caller along with DeQueue
        m_pCB.PresentSample(pSample, hnsPresentationTime);
    }

    return(bPresentNow);
}
// Sets the subtype GUID (MF_MT_SUBTYPE) on the media type; throws on failure.
public void SetSubType(Guid guid)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().SetGUID(MFAttributesClsid.MF_MT_SUBTYPE, guid));
}
// Opens an icy (SHOUTcast-style) HTTP stream for the given URL.
// The HTTP request is started asynchronously; when the response arrives,
// the continuation stores the response stream, caches the first
// DefaultReceiveBufferSize bytes (to fake seek support) and signals
// m_ConnectEvent.  The rest of the constructor runs in parallel with the
// connect.
public IcyScheme(string sUrl)
{
    m_Position = 0;
    m_SinceLast = 0;
    m_bShutdown = false;
    m_uri = new Uri(sUrl);
    m_ConnectEvent = new ManualResetEvent(false);

    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(m_uri);
    ServicePoint point = request.ServicePoint;
    point.ReceiveBufferSize = DefaultReceiveBufferSize * 2;
    request.UserAgent = "NSPlayer/12.00.7601.23471";
    // Ask the server to interleave icy metadata into the stream.
    request.Headers.Add("Icy-Metadata:1");

    bool OldUnsafe, UseOldUnsafe;
    UseOldUnsafe = AllowUnsafeHeaderParsing(true, out OldUnsafe);

    // Parts of BeginGetResponse are still done synchronously,
    // so we do it this way.
    Task.Factory.FromAsync <WebResponse>(request.BeginGetResponse, request.EndGetResponse, null)
    .ContinueWith(task =>
    {
        // Restore the previous "unsafe header parsing" setting now that
        // the response (good or bad) has been received.
        if (UseOldUnsafe)
        {
            AllowUnsafeHeaderParsing(OldUnsafe, out OldUnsafe);
        }
        if (task.Status == TaskStatus.RanToCompletion)
        {
            try
            {
                WebResponse wr = task.Result;

                // Turn the headers into metadata, plus store them as an
                // IMFAttribute
                LoadResponseHeaders(wr.Headers);

                // The only part of the HttpWebRequest that we keep.
                m_ResponseStream = wr.GetResponseStream();

                // In order to (pretend to) support seeking, we save off a block
                // of data. Any seeks to the first DefaultReceiveBufferSize bytes
                // are resolved from this data.
                m_PositionReal = long.MaxValue;
                m_FirstBlock = new byte[DefaultReceiveBufferSize];
                WaitRead(m_FirstBlock, 0, DefaultReceiveBufferSize);
                m_PositionReal = 0;

                // Used to tell WaitForConnect that the connection is complete.
                m_ConnectEvent.Set();
            }
            catch
            {
                // Best-effort teardown: any failure during connect/caching
                // disposes the object so waiters see a dead stream.
                Dispose();
            }
        }
        else
        {
            // The request itself failed (faulted/canceled).
            Dispose();
        }
    });

    // While we are waiting for the connect/cache to complete, finish
    // the constructor.
    m_MetaData = new MetaDataContainer();
    // NOTE(review): the MFError assignment presumably throws on failure via
    // implicit conversion (the file's throw-on-assign idiom) — confirm.
    MFError hrthrowonerror = MFExtern.MFCreateEventQueue(out m_events);

    m_FixNames = new Dictionary <string, string>(13);

    // Note that the keys are (intentionally) all lower case.
    // Maps icy / x-audiocast header names to standard WM metadata names.
    m_FixNames["icy-name"] = "WM/RadioStationName";
    m_FixNames["x-audiocast-name"] = "WM/RadioStationName";
    m_FixNames["icy-genre"] = "WM/Genre";
    m_FixNames["x-audiocast-genre"] = "WM/Genre";
    m_FixNames["icy-url"] = "WM/PromotionURL";
    m_FixNames["x-audiocast-url"] = "WM/PromotionURL";
    m_FixNames["x-audiocast-description"] = "Description";
    m_FixNames["x-audiocast-artist"] = "Author";
    m_FixNames["x-audiocast-album"] = "WM/AlbumTitle";
    m_FixNames["x-audiocast-br"] = "BitRate";
    m_FixNames["icy-br"] = "BitRate";
    m_FixNames["streamtitle"] = "Title";
    m_FixNames["streamurl"] = "WM/AudioFileURL";

    hrthrowonerror = SetString(
        MFAttributesClsid.MF_BYTESTREAM_EFFECTIVE_URL,
        m_uri.AbsoluteUri);

    // Use this to make async calls to handle meta data.
    m_caller = new LoadValuesCaller(LoadValues);
}
// Sets the size of each sample, in bytes (MF_MT_SAMPLE_SIZE); throws on failure.
public void SetSampleSize(int nSize)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().SetUINT32(MFAttributesClsid.MF_MT_SAMPLE_SIZE, nSize));
}
// Strings in icy-metaint format. This method called asynchronously by // DoRead(). private void LoadValues(byte[] bData) { // This horrific bit of code is intended to allow for foreign language // character sets. icy provides no way of handling (say) Thai // characters in song titles (and yes, it happens). But you can // set BYTEADJUSTMENT to 0x0e00 and we'll add it to the bytes here. // There's GOT to be a better way... int byteadjustment; HResult hr = GetUINT32(BYTEADJUSTMENT, out byteadjustment); if (MFError.Failed(hr)) { byteadjustment = 0; } // They're really just bytes. I could just as well use ASCII here. string sData = Encoding.UTF8.GetString(bData); // Strings are of the form: // StreamTitle ='something';StreamUrl='somethingelse'; // Assume no spaces around = or ; // Note that there can be (unescaped) embedded ' marks in the // something. Yeah, really. int iPos = sData.IndexOf('='); while (iPos > 0) { // I'm not sure there's any standardization about the case of // property names, so I'm forcing them all to lc. string sKey = FixName(sData.Substring(0, iPos).Trim().ToLower()); iPos++; // skip past equal // I can't think of a more generic way to search // for this. Since ' can be "embedded" in the // string, how do you know where the end of the // string is? int iEnd = sData.IndexOf("';", iPos); // Discard open and close quote string sValue = sData.Substring(iPos + 1, iEnd - iPos - 1).Trim(); StringBuilder value; value = new StringBuilder(sValue); if (byteadjustment != 0) { for (int x = 0; x < value.Length; x++) { value[x] += (char)byteadjustment; } } // Add (or update) the key/value. MFError throwonerror = m_MetaData.SetProperty( sKey, new PropVariant(value.ToString())); // Skip past ' and ; sData = sData.Substring(iEnd + 1 + 1); iPos = sData.IndexOf('='); } // Let anyone who's listening know that there's new metadata. SendEvent(MediaEventType.MESourceMetadataChanged); }
// Sets a description of how the frames are interlaced
// (MF_MT_INTERLACE_MODE); throws on failure.
public void SetInterlaceMode(MFVideoInterlaceMode mode)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().SetUINT32(MFAttributesClsid.MF_MT_INTERLACE_MODE, (int)mode));
}
// Create a new media type.
// Wraps a freshly created, empty IMFMediaType; throws if creation fails.
public MediaTypeBuilder()
{
    MFError.ThrowExceptionForHR(MFExtern.MFCreateMediaType(out m_pType));
}
// Sets the data error rate, in bit errors per second
// (MF_MT_AVG_BIT_ERROR_RATE); throws on failure.
public void SetDataBitErrorRate(int rate)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().SetUINT32(MFAttributesClsid.MF_MT_AVG_BIT_ERROR_RATE, rate));
}
// Direct wrappers of IMFMediaType methods.
// (For these methods, we leave parameter validation to the IMFMediaType
// implementation.)

// Retrieves the major type GUID; throws on failure.
public void GetMajorType(out Guid pGuid)
{
    MFError.ThrowExceptionForHR(GetMediaType().GetMajorType(out pGuid));
}
// Sets the approximate data rate of the video stream (MF_MT_AVG_BITRATE);
// throws on failure.
// NOTE: the method name misspells "Average"; kept as-is because renaming
// would break existing callers.
public void SetAvgerageBitRate(int rate)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().SetUINT32(MFAttributesClsid.MF_MT_AVG_BITRATE, rate));
}
// Queries whether the media data is compressed; throws on failure.
public void IsCompressedFormat(out bool pbCompressed)
{
    MFError.ThrowExceptionForHR(GetMediaType().IsCompressedFormat(out pbCompressed));
}
// Sets the aspect ratio of the output rectangle for a video media type
// (MF_MT_PAD_CONTROL_FLAGS); throws on failure.
public void SetPadControlFlags(MFVideoPadFlags flags)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().SetUINT32(MFAttributesClsid.MF_MT_PAD_CONTROL_FLAGS, (int)flags));
}
// Retrieves an alternative representation of the media type; throws on
// failure.  Pair with FreeRepresentation to release the returned memory.
public void GetRepresentation(Guid guidRepresentation, out IntPtr ppvRepresentation)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().GetRepresentation(guidRepresentation, out ppvRepresentation));
}
// Sets the conversion function from RGB to R'G'B'
// (MF_MT_TRANSFER_FUNCTION); throws on failure.
public void SetTransferFunction(MFVideoTransferFunction nFxn)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().SetUINT32(MFAttributesClsid.MF_MT_TRANSFER_FUNCTION, (int)nFxn));
}
// Frees memory that was allocated by the GetRepresentation method; throws
// on failure.
public void FreeRepresentation(Guid guidRepresentation, IntPtr pvRepresentation)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().FreeRepresentation(guidRepresentation, pvRepresentation));
}
// Sets the optimal lighting conditions for viewing
// (MF_MT_VIDEO_LIGHTING); throws on failure.
public void SetVideoLighting(MFVideoLighting nLighting)
{
    MFError.ThrowExceptionForHR(
        GetMediaType().SetUINT32(MFAttributesClsid.MF_MT_VIDEO_LIGHTING, (int)nLighting));
}
// Extension helper: returns the descriptive error text for the given HResult.
public static string GetDescription(this HResult hr)
{
    string sText = MFError.GetErrorText((int)hr);
    return sText;
}