public void TestGetLength()
{
    // A freshly cleared packet must report zero length.
    clearPacket();
    Assert.AreEqual(0, packet.GetLength());

    // Appending a buffer grows the reported length by that buffer's size.
    packet.AddBuffer(data1);
    Assert.AreEqual(data1.Length, packet.GetLength());

    // A second buffer accumulates: total length is the sum of both.
    packet.AddBuffer(data2);
    var expectedTotal = data1.Length + data2.Length;
    Assert.AreEqual(expectedTotal, packet.GetLength());
}
/// <summary>
/// Handles an incoming media-sample packet: strips the sample header,
/// validates the payload, and forwards video samples to the decoder.
/// Samples for streams other than the registered video stream are dropped.
/// </summary>
/// <param name="packet">Packet whose read position is at the sample header.</param>
private void ProcessServerSample(IBufferPacket packet)
{
    // Copy the header object; TakeObject advances the packet's read
    // position past the header bytes, leaving only the sample payload.
    StspSampleHeader sampleHead = StreamConvertor.TakeObject<StspSampleHeader>(packet);

    // FIX: the original guard was GetLength() < 0, which is never true for a
    // length and made the check dead code. A sample with no remaining payload
    // is equally invalid (ConverToMediaBuffer rejects 0-length with
    // E_INVALIDARG), so reject it here with the same error.
    if (packet.GetLength() <= 0)
    {
        ThrowIfError(HResult.E_INVALIDARG);
    }

    // Only the video stream we are decoding is of interest; ignore the rest.
    if (sampleHead.dwStreamId != _videoStreamId)
    {
        return;
    }

    // Convert the remaining payload to an MF sample and hand it to the decoder.
    IMFSample spSample;
    ThrowIfError(ToMFSample(packet, out spSample));
    SetSampleAttributes(sampleHead, spSample);
    _decoder.ProcessSample(spSample);
}
/// <summary>
/// Handles an incoming media-sample packet for the media source: strips the
/// sample header, validates the payload, resolves the target stream, and
/// forwards the sample to that stream when it is active.
/// </summary>
/// <param name="packet">Packet whose read position is at the sample header.</param>
private void ProcessServerSample(IBufferPacket packet)
{
    // Samples may only be processed while the source is in the started state.
    if (_eSourceState != SourceState.SourceState_Started)
    {
        Throw(HResult.MF_E_UNEXPECTED);
    }

    // Copy the header object; TakeObject advances the packet's read
    // position past the header bytes, leaving only the sample payload.
    StspSampleHeader sampleHead = StreamConvertor.TakeObject<StspSampleHeader>(packet);

    // FIX: the original guard was GetLength() < 0, which is never true for a
    // length and made the check dead code; an empty payload is invalid too.
    if (packet.GetLength() <= 0)
    {
        ThrowIfError(HResult.E_INVALIDARG);
    }

    // Resolve the stream this sample belongs to.
    MediaStream spStream;
    ThrowIfError(GetStreamById(sampleHead.dwStreamId, out spStream));

    if (spStream.IsActive)
    {
        // Convert packet to MF sample and forward it to the proper stream.
        IMFSample spSample;
        ThrowIfError(ToMFSample(packet, out spSample));
        spStream.ProcessSample(sampleHead, spSample);
    }
}
// Initializes this stream from a server-sent stream description plus an
// attribute blob: builds the media type, validates it, and wraps it in an
// MF stream descriptor. Throws (via Throw/ThrowIfError) on invalid input or
// any failed MF call. Leaves the stream in the stopped state.
private void Initialize(StspStreamDescription pStreamDescription, IBufferPacket attributesBuffer)
{
    //Create the media event queue.
    ThrowIfError(MFExtern.MFCreateEventQueue(out _spEventQueue));
    IMFMediaType mediaType;
    IMFStreamDescriptor spSD;
    IMFMediaTypeHandler spMediaTypeHandler;
    _isVideo = (pStreamDescription.guiMajorType == MFMediaType.Video);
    //Create a media type object.
    ThrowIfError(MFExtern.MFCreateMediaType(out mediaType));
    // The packet must hold at least cbAttributesSize bytes, and an empty
    // attribute blob is not a valid stream description.
    if (attributesBuffer.GetLength() < pStreamDescription.cbAttributesSize || pStreamDescription.cbAttributesSize == 0)
    {
        //Invalid stream description
        Throw(HResult.MF_E_UNSUPPORTED_FORMAT);
    }
    //Prepare buffer where we will copy attributes to, then initialize media type's attributes
    var pAttributes = Marshal.AllocHGlobal(pStreamDescription.cbAttributesSize);
    try
    {
        Marshal.Copy(attributesBuffer.TakeBuffer(pStreamDescription.cbAttributesSize), 0, pAttributes, pStreamDescription.cbAttributesSize);
        ThrowIfError(MFExtern.MFInitAttributesFromBlob(mediaType, pAttributes, pStreamDescription.cbAttributesSize));
    }
    finally
    {
        // Unmanaged scratch buffer is always freed, even if the copy or
        // attribute initialization throws.
        Marshal.FreeHGlobal(pAttributes);
    }
    // Reject media types this source does not support before committing them.
    Validation.ValidateInputMediaType(pStreamDescription.guiMajorType, pStreamDescription.guiSubType, mediaType);
    ThrowIfError(mediaType.SetGUID(MF_MT_MAJOR_TYPE, pStreamDescription.guiMajorType));
    ThrowIfError(mediaType.SetGUID(MF_MT_SUBTYPE, pStreamDescription.guiSubType));
    //Now we can create MF stream descriptor.
    ThrowIfError(MFExtern.MFCreateStreamDescriptor(pStreamDescription.dwStreamId, 1, new IMFMediaType[] { mediaType }, out spSD));
    ThrowIfError(spSD.GetMediaTypeHandler(out spMediaTypeHandler));
    //Set current media type
    ThrowIfError(spMediaTypeHandler.SetCurrentMediaType(mediaType));
    _spStreamDescriptor = spSD;
    _id = pStreamDescription.dwStreamId;
    // Initial state of the stream is stopped (the original comment claimed
    // "started", which contradicted the assignment below).
    _eSourceState = SourceState.SourceState_Stopped;
}
/// <summary>
/// Copies the remaining contents of <paramref name="packet"/> into a newly
/// created MF memory buffer.
/// </summary>
/// <param name="packet">Source packet; its remaining bytes become the buffer contents.</param>
/// <param name="mediaBuffer">Receives the filled buffer on success; null on failure.</param>
/// <returns>S_OK on success, E_INVALIDARG for a null/empty packet, or the failing MF HResult.</returns>
public static HResult ConverToMediaBuffer(IBufferPacket packet, out IMFMediaBuffer mediaBuffer)
{
    mediaBuffer = null;

    // FIX: the null check must run before the packet is dereferenced; the
    // original called packet.GetLength() first and would throw
    // NullReferenceException on null input instead of returning E_INVALIDARG.
    if (packet == null)
    {
        return HResult.E_INVALIDARG;
    }

    var dataLength = packet.GetLength();
    if (dataLength == 0)
    {
        return HResult.E_INVALIDARG;
    }

    IMFMediaBuffer spMediaBuffer;
    HResult hr = MFExtern.MFCreateMemoryBuffer(dataLength, out spMediaBuffer);
    if (MFError.Failed(hr))
    {
        return hr;
    }

    IntPtr pBuffer;
    int cbMaxLength;
    int cbCurrentLength;

    //todo: call lock2d on a 2d buffer because the lock2d is more efficient.
    /*
     * if (MFError.Succeeded(Marshal.intp spMediaBuffer.QueryInterface(IID_PPV_ARGS(&_sp2DBuffer))))
     * {
     *  LONG lPitch;
     *  hr = _sp2DBuffer.Lock2D(&_pBuffer, &lPitch);
     * }
     * else
     * {
     *  hr = pMediaBuffer->Lock(&_pBuffer, &cbMaxLength, &cbCurrentLength);
     * }*/
    hr = spMediaBuffer.Lock(out pBuffer, out cbMaxLength, out cbCurrentLength);
    if (MFError.Failed(hr))
    {
        return hr;
    }

    try
    {
        // Copy the packet payload into the locked MF buffer.
        var buffer = packet.TakeBuffer(dataLength);
        Marshal.Copy(buffer, 0, pBuffer, buffer.Length);
        spMediaBuffer.SetCurrentLength(buffer.Length);
    }
    finally
    {
        // FIX: always release the lock, even if the copy throws; the original
        // left the buffer locked on failure.
        spMediaBuffer.Unlock();
    }

    mediaBuffer = spMediaBuffer;
    return hr;
}
// Builds the video stream descriptor from a server-sent stream description
// plus an attribute blob: creates and initializes the media type, wraps it in
// an MF stream descriptor, caches stream id/size/aspect-ratio fields, and
// sizes the draw device. Throws (via Throw/ThrowIfError) on invalid input or
// any failed MF call. (Method name's spelling is kept — callers depend on it.)
private void initVideoDesctriptor(StspStreamDescription pStreamDescription, IBufferPacket attributesBuffer)
{
    IMFMediaType mediaType;
    IMFStreamDescriptor spSD;
    IMFMediaTypeHandler spMediaTypeHandler;
    //Create a media type object.
    ThrowIfError(MFExtern.MFCreateMediaType(out mediaType));
    // The packet must hold at least cbAttributesSize bytes, and an empty
    // attribute blob is not a valid stream description.
    if (attributesBuffer.GetLength() < pStreamDescription.cbAttributesSize || pStreamDescription.cbAttributesSize == 0)
    {
        //Invalid stream description
        Throw(HResult.MF_E_UNSUPPORTED_FORMAT);
    }
    //Prepare buffer where we will copy attributes to, then initialize media type's attributes
    var pAttributes = Marshal.AllocHGlobal(pStreamDescription.cbAttributesSize);
    try
    {
        Marshal.Copy(attributesBuffer.TakeBuffer(pStreamDescription.cbAttributesSize), 0, pAttributes, pStreamDescription.cbAttributesSize);
        ThrowIfError(MFExtern.MFInitAttributesFromBlob(mediaType, pAttributes, pStreamDescription.cbAttributesSize));
    }
    finally
    {
        // Unmanaged scratch buffer is always freed, even if the copy or
        // attribute initialization throws.
        Marshal.FreeHGlobal(pAttributes);
    }
    ThrowIfError(mediaType.SetGUID(MF_MT_MAJOR_TYPE, pStreamDescription.guiMajorType));
    ThrowIfError(mediaType.SetGUID(MF_MT_SUBTYPE, pStreamDescription.guiSubType));
    //Now we can create MF stream descriptor.
    ThrowIfError(MFExtern.MFCreateStreamDescriptor(pStreamDescription.dwStreamId, 1, new IMFMediaType[] { mediaType }, out spSD));
    ThrowIfError(spSD.GetMediaTypeHandler(out spMediaTypeHandler));
    //Set current media type
    ThrowIfError(spMediaTypeHandler.SetCurrentMediaType(mediaType));
    // Cache the negotiated type and stream identity for later sample routing.
    _videoMediaType = mediaType;
    _videoStreamDescriptor = spSD;
    _videoStreamId = pStreamDescription.dwStreamId;
    // Pull frame size and pixel aspect ratio out of the media type so the
    // draw device can be configured to match the incoming video.
    ThrowIfError(MFExtern.MFGetAttributeSize(mediaType, MFAttributesClsid.MF_MT_FRAME_SIZE, out _videoWitdh, out _videoHeight));
    ThrowIfError(MFExtern.MFGetAttributeRatio(mediaType, MFAttributesClsid.MF_MT_PIXEL_ASPECT_RATIO, out _videoRatioN, out _videoRatioD));
    _drawDevice.InitializeSetVideoSize(_videoWitdh, _videoHeight, new MFRatio(_videoRatioN, _videoRatioD));
}
/// <summary>
/// Handles a format-change packet from the server: validates its size,
/// reads the new stream description, and builds a media type from the
/// attached attribute blob. Throws on invalid packets or failed MF calls.
/// </summary>
/// <param name="packet">Packet whose read position is at the stream description.</param>
private void ProcessServerFormatChange(IBufferPacket packet)
{
    // Format changes are only legal while the source is started.
    if (_eSourceState != SourceState.SourceState_Started)
    {
        Throw(HResult.MF_E_UNEXPECTED);
    }

    int cbTotalLen = packet.GetLength();
    if (cbTotalLen <= 0)
    {
        Throw(HResult.E_INVALIDARG);
    }

    // Minimum size of the operation payload is size of Description structure
    if (cbTotalLen < Marshal.SizeOf(typeof(StspStreamDescription)))
    {
        ThrowIfError(HResult.MF_E_UNSUPPORTED_FORMAT);
    }

    //todo: add try or use enhanced method to judge the HResult received from TakeObject(...)
    StspStreamDescription streamDesc = StreamConvertor.TakeObject<StspStreamDescription>(packet);

    // The payload must be exactly description + attribute blob, and the blob
    // must be non-empty.
    if (cbTotalLen != Marshal.SizeOf(typeof(StspStreamDescription)) + streamDesc.cbAttributesSize || streamDesc.cbAttributesSize == 0)
    {
        ThrowIfError(HResult.MF_E_UNSUPPORTED_FORMAT);
    }

    // Prepare buffer where we will copy attributes to
    IntPtr ptr = Marshal.AllocHGlobal(streamDesc.cbAttributesSize);
    try
    {
        var data = packet.TakeBuffer(streamDesc.cbAttributesSize);
        Marshal.Copy(data, 0, ptr, streamDesc.cbAttributesSize);

        IMFMediaType spMediaType;
        // Create a media type object.
        ThrowIfError(MFExtern.MFCreateMediaType(out spMediaType));
        // Initialize media type's attributes
        ThrowIfError(MFExtern.MFInitAttributesFromBlob(spMediaType, ptr, streamDesc.cbAttributesSize));
    }
    finally
    {
        // FIX: memory from Marshal.AllocHGlobal must be returned with
        // Marshal.FreeHGlobal — the original called Marshal.Release (meant for
        // COM interface pointers) and only on the success path, leaking the
        // buffer whenever anything above threw. Freeing in finally also
        // replaces the original `catch (Exception ex) { throw ex; }`, which
        // destroyed the stack trace.
        Marshal.FreeHGlobal(ptr);
    }
}
/// <summary>
/// Parses the server's session description packet (stream count plus one
/// StspStreamDescription per stream), validates the advertised sizes, and
/// initializes the video descriptor/decoder for the first video stream found.
/// Supports at most 2 streams and at most 64kB of total attribute data.
/// </summary>
/// <param name="data">Packet whose read position is at the StspDescription header.</param>
private void ProcessServerDescription(IBufferPacket data)
{
    StspDescription desc = new StspDescription();
    var dataLen = data.GetLength();
    int descSize = Marshal.SizeOf(typeof(StspDescription));
    int streamDescSize = Marshal.SizeOf(typeof(StspStreamDescription));

    // Copy description
    desc = StreamConvertor.TakeObject<StspDescription>(data);

    // Size of the packet should match size described in the packet
    // (size of Description structure + size of attribute blob).
    // StspDescription already embeds one stream description, hence (cNumStreams - 1).
    var cbConstantSize = Convert.ToInt32(descSize + (desc.cNumStreams - 1) * streamDescSize);

    // Check if the input parameters are valid. We only support 2 streams.
    if (cbConstantSize < Marshal.SizeOf(desc) || desc.cNumStreams == 0 || desc.cNumStreams > 2 || dataLen < cbConstantSize)
    {
        ThrowIfError(HResult.MF_E_UNSUPPORTED_FORMAT);
    }

    try
    {
        // First description came embedded in desc; read the remaining ones
        // directly from the packet.
        List<StspStreamDescription> streamDescs = new List<StspStreamDescription>(desc.aStreams);
        for (int i = 1; i < desc.cNumStreams; i++)
        {
            var sd = StreamConvertor.TakeObject<StspStreamDescription>(data);
            streamDescs.Add(sd);
        }

        int cbAttributeSize = 0;
        for (int i = 0; i < desc.cNumStreams; ++i)
        {
            cbAttributeSize += streamDescs[i].cbAttributesSize;
            /* todo: check out of range on cbAttributeSize
             * if (out of range)
             * {
             *  Throw(HResult.MF_E_UNSUPPORTED_FORMAT);
             * }*/
        }

        // Validate the parameters. Limit the total size of attributes to 64kB.
        if ((dataLen != (cbConstantSize + cbAttributeSize)) || (cbAttributeSize > 0x10000))
        {
            Throw(HResult.MF_E_UNSUPPORTED_FORMAT);
        }

        // Only init for the first video stream.
        foreach (var sd in streamDescs)
        {
            if (sd.guiMajorType == MFMediaType.Video)
            {
                initVideoDesctriptor(sd, data);
                _decoder.initialize(_videoStreamId, _videoMediaType);
                break;
            }
        }
    }
    catch (Exception)
    {
        // FIX: rethrow with `throw;` — the original `throw ex;` reset the
        // stack trace, hiding the real failure location.
        throw;
    }
}
/// <summary>
/// Parses the server's session description packet (stream count plus one
/// StspStreamDescription per stream), validates the advertised sizes, creates
/// a MediaStream per description, builds the presentation descriptor, and
/// moves the source to the stopped state before completing the open operation.
/// Supports at most 2 streams and at most 64kB of total attribute data.
/// </summary>
/// <param name="data">Packet whose read position is at the StspDescription header.</param>
private void ProcessServerDescription(IBufferPacket data)
{
    StspDescription desc = new StspDescription();
    var dataLen = data.GetLength();
    int descSize = Marshal.SizeOf(typeof(StspDescription));
    int streamDescSize = Marshal.SizeOf(typeof(StspStreamDescription));

    // Copy description
    desc = StreamConvertor.TakeObject<StspDescription>(data);

    // Size of the packet should match size described in the packet
    // (size of Description structure + size of attribute blob).
    // StspDescription already embeds one stream description, hence (cNumStreams - 1).
    var cbConstantSize = Convert.ToInt32(descSize + (desc.cNumStreams - 1) * streamDescSize);

    // Check if the input parameters are valid. We only support 2 streams.
    if (cbConstantSize < Marshal.SizeOf(desc) || desc.cNumStreams == 0 || desc.cNumStreams > 2 || dataLen < cbConstantSize)
    {
        ThrowIfError(HResult.MF_E_UNSUPPORTED_FORMAT);
    }

    try
    {
        // First description came embedded in desc; read the remaining ones
        // directly from the packet.
        List<StspStreamDescription> streamDescs = new List<StspStreamDescription>(desc.aStreams);
        for (int i = 1; i < desc.cNumStreams; i++)
        {
            var sd = StreamConvertor.TakeObject<StspStreamDescription>(data);
            streamDescs.Add(sd);
        }

        int cbAttributeSize = 0;
        for (int i = 0; i < desc.cNumStreams; ++i)
        {
            cbAttributeSize += streamDescs[i].cbAttributesSize;
            /* todo: check out of range on cbAttributeSize
             * if (out of range)
             * {
             *  Throw(HResult.MF_E_UNSUPPORTED_FORMAT);
             * }*/
        }

        // Validate the parameters. Limit the total size of attributes to 64kB.
        if ((dataLen != (cbConstantSize + cbAttributeSize)) || (cbAttributeSize > 0x10000))
        {
            Throw(HResult.MF_E_UNSUPPORTED_FORMAT);
        }

        // Create stream for every stream description sent by the server.
        foreach (var sd in streamDescs)
        {
            MediaStream spStream;
            ThrowIfError(MediaStream.CreateInstance(sd, data, this, out spStream));
            _streams.Add(spStream);
        }

        InitPresentationDescription();

        // Everything succeeded; we are in stopped state now.
        _eSourceState = SourceState.SourceState_Stopped;
        CompleteOpen(HResult.S_OK);
    }
    catch (Exception)
    {
        // FIX: rethrow with `throw;` — the original `throw ex;` reset the
        // stack trace, hiding the real failure location.
        throw;
    }
}