/// <summary>
/// Native vtable entry: resolves the managed callback behind the shadow pointer
/// and forwards the debugger error notification to it.
/// </summary>
/// <param name="thisPtr">Native pointer to the callback shadow instance.</param>
/// <param name="processRef">Native pointer to the CorDebug process.</param>
/// <param name="errorHR">HRESULT describing the error.</param>
/// <param name="errorCode">Additional error code.</param>
private static void OnDebuggerError(IntPtr thisPtr, IntPtr processRef, SharpDX.Result errorHR, int errorCode)
{
    var impl = (ManagedCallbackImpl)ToShadow<ManagedCallbackShadow>(thisPtr).Callback;
    impl.OnDebuggerError(Process.GetCorProcess(processRef), errorHR, errorCode);
}
/// <summary>
/// Tries to acquire the next duplicated desktop frame within the given timeout.
/// </summary>
/// <param name="timeOut">Timeout in milliseconds passed to DXGI frame acquisition.</param>
/// <returns>True if a frame was acquired (i.e. a screen resource is available).</returns>
bool acquireFrame(int timeOut)
{
    _screenResource = null;
    try
    {
        // Return value intentionally ignored: success is determined by whether
        // a screen resource was produced. (Previously stored in an unused local.)
        _outputDuplication.TryAcquireNextFrame(timeOut, out _duplicateFrameInformation, out _screenResource);
    }
    catch (SharpDXException)
    {
        // Best-effort by design: acquisition failures (e.g. WAIT_TIMEOUT,
        // DXGI_ERROR_ACCESS_LOST on mode switch) simply mean "no frame this time".
    }
    return _screenResource != null;
}
/// <summary>
/// Initializes a new instance of the <see cref="MediaFoundationVideoReader"/> class.
/// </summary>
/// <param name="videoSource">The source video file.</param>
public MediaFoundationVideoReader(ResourceLink videoSource)
{
    videoSource.EnsureNotNull(nameof(videoSource));

    try
    {
        m_videoSource = videoSource;

        // Create the source reader
        using (MF.MediaAttributes mediaAttributes = new MF.MediaAttributes(1))
        {
            // We need the 'EnableVideoProcessing' attribute because of the RGB32 format
            // see (lowest post): http://msdn.developer-works.com/article/11388495/How+to+use+SourceReader+(for+H.264+to+RGB+conversion)%3F
            mediaAttributes.Set(MF.SourceReaderAttributeKeys.EnableVideoProcessing, 1);
            mediaAttributes.Set(MF.SourceReaderAttributeKeys.DisableDxva, 1);

            // Wrap the .net stream to a MF Bytestream
            m_videoSourceStreamNet = m_videoSource.OpenInputStream();
            m_videoSourceStream = new MF.ByteStream(m_videoSourceStreamNet);
            try
            {
                using (MF.MediaAttributes byteStreamAttributes = m_videoSourceStream.QueryInterface<MF.MediaAttributes>())
                {
                    // A synthetic origin name carrying the real file extension helps MF
                    // pick the right decoder for the wrapped stream.
                    byteStreamAttributes.Set(MF.ByteStreamAttributeKeys.OriginName, "Dummy." + videoSource.FileExtension);
                }
            }
            catch (SharpDXException)
            {
                // The interface MF.MediaAttributes is not available on some platforms
                // (occurred during tests on Windows 7 without Platform Update)
            }

            // Create the sourcereader by custom native method (needed because of the ByteStream arg)
            IntPtr sourceReaderPointer = IntPtr.Zero;
            SharpDX.Result sdxResult = NativeMethods.MFCreateSourceReaderFromByteStream_Native(
                m_videoSourceStream.NativePointer,
                mediaAttributes.NativePointer,
                out sourceReaderPointer);
            sdxResult.CheckError();

            m_sourceReader = new MF.SourceReader(sourceReaderPointer);
        }

        // Apply source configuration: request RGB32 output on the first video stream
        using (MF.MediaType mediaType = new MF.MediaType())
        {
            mediaType.Set(MF.MediaTypeAttributeKeys.MajorType, MF.MediaTypeGuids.Video);
            mediaType.Set(MF.MediaTypeAttributeKeys.Subtype, MF.VideoFormatGuids.Rgb32);
            m_sourceReader.SetCurrentMediaType(
                MF.SourceReaderIndex.FirstVideoStream,
                mediaType);
            m_sourceReader.SetStreamSelection(MF.SourceReaderIndex.FirstVideoStream, new SharpDX.Mathematics.Interop.RawBool(true));
        }

        // Read some information about the source
        // (frame width/height are packed into a single 64-bit attribute value)
        using (MF.MediaType mediaType = m_sourceReader.GetCurrentMediaType(MF.SourceReaderIndex.FirstVideoStream))
        {
            long frameSizeLong = mediaType.Get(MF.MediaTypeAttributeKeys.FrameSize);
            m_frameSize = new Size2(MFHelper.GetValuesByMFEncodedInts(frameSizeLong));
        }

        // Get additional properties (duration and media-source characteristic flags)
        m_durationLong = m_sourceReader.GetPresentationAttribute(
            MF.SourceReaderIndex.MediaSource, MF.PresentationDescriptionAttributeKeys.Duration);
        m_characteristics = (MediaSourceCharacteristics_Internal)m_sourceReader.GetPresentationAttribute(
            MF.SourceReaderIndex.MediaSource, MF.SourceReaderAttributeKeys.MediaSourceCharacteristics);
    }
    catch (Exception)
    {
        // Release any partially-created MF resources before rethrowing
        this.Dispose();
        throw;
    }
}