/// <summary> sample callback, NOT USED. </summary>
int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
{
    if (!m_bGotOne)
    {
        // Set bGotOne to prevent further calls until we
        // request a new bitmap.
        m_bGotOne = true;

        IntPtr pBuffer;
        pSample.GetPointer(out pBuffer);
        int iBufferLen = pSample.GetSize();

        if (iBufferLen > m_stride * m_videoHeight)
        {
            throw new Exception("Buffer is wrong size");
        }

#if !LIB
        unsafe
        {
            Buffer.MemoryCopy(pBuffer.ToPointer(), m_handle.ToPointer(), iBufferLen, iBufferLen);
        }
#endif

        // Picture is ready.
        m_PictureReady.Set();
    }

    Marshal.ReleaseComObject(pSample);
    return 0;
}
/// <summary> sample callback, NOT USED. </summary>
int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
{
    if (!m_bGotOne)
    {
        // Set bGotOne to prevent further calls until we
        // request a new bitmap.
        m_bGotOne = true;

        IntPtr pBuffer;
        pSample.GetPointer(out pBuffer);
        int iBufferLen = pSample.GetSize();

        if (iBufferLen > m_stride * m_videoHeight)
        {
            throw new Exception("Buffer is wrong size");
        }

        NativeMethods.CopyMemory(m_handle, pBuffer, m_stride * m_videoHeight);

        // Picture is ready.
        m_PictureReady.Set();
    }

    Marshal.ReleaseComObject(pSample);
    return 0;
}
/// <summary>
/// Sample callback, NOT USED.
/// </summary>
/// <param name="SampleTime">The sample time.</param>
/// <param name="pSample">The media sample.</param>
/// <returns>The result.</returns>
int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
{
    if (!_gotOneImage)
    {
        // Set _gotOneImage to prevent further calls until we
        // request a new bitmap.
        _gotOneImage = true;

        IntPtr pBuffer;
        pSample.GetPointer(out pBuffer);
        int iBufferLen = pSample.GetSize();

        if (iBufferLen > _strideImage * _videoHeightImage)
        {
            throw new Exception("Buffer is wrong size");
        }

        Devices.CopyMemory(_handleImage, pBuffer, _strideImage * _videoHeightImage);

        // Picture is ready.
        if (_pictureReadyImage != null)
        {
            _pictureReadyImage.Set();
        }
    }

    Marshal.ReleaseComObject(pSample);
    return 0;
}
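// The SampleCB implementations above only run once the callback object is
// registered with the sample grabber filter. A minimal registration sketch
// using DirectShowLib (assumptions: 'this' implements ISampleGrabberCB, and
// the filter still has to be added to the graph and connected; variable
// names are illustrative, not from the original sources):

ISampleGrabber sampGrabber = (ISampleGrabber)new SampleGrabber();

// No internal buffering needed when a callback consumes the samples.
int hr = sampGrabber.SetBufferSamples(false);
DsError.ThrowExceptionForHR(hr);

// Keep delivering samples rather than stopping after the first one.
hr = sampGrabber.SetOneShot(false);
DsError.ThrowExceptionForHR(hr);

// 0 routes samples to SampleCB; 1 would route buffer copies to BufferCB.
hr = sampGrabber.SetCallback(this, 0);
DsError.ThrowExceptionForHR(hr);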
public int Request(IMediaSample pSample, IntPtr dwUser)
{
    int hr;

    Debug.Write(string.Format("Request {0} user: {1}", pSample != null, dwUser));

    MediaHolder mh = new MediaHolder(pSample, dwUser);

    lock (this) // Protect the m_ variables
    {
        if (!m_GraphIsFlushing)
        {
            // Now that we have populated everything, wait for the splitter to call the
            // WaitForNext() method to retrieve the data
            m_Requests.Enqueue(mh);

            // Let the waiting thread in WaitForNext know something is ready for
            // processing
            m_Wait.Set();
            hr = S_Ok;
        }
        else
        {
            hr = DsResults.E_WrongState;
        }
    }

    Debug.WriteLine(hr.ToString());
    return hr;
}
int ISampleGrabberCB.SampleCB(double sampleTime, IMediaSample pSample)
{
    try
    {
        if (transcoderError != null)
        {
            return WinAPI.E_FAIL;
        }

        samplesProcessed += 1;

        int dataLen = pSample.GetActualDataLength();
        IntPtr bufPtr;
        int hr = pSample.GetPointer(out bufPtr);

        bool processed = ProcessSample(bufPtr, dataLen, sampleTime);
        return processed ? WinAPI.S_OK : WinAPI.E_FAIL;
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine(ex.ToString());
    }
    finally
    {
        Marshal.ReleaseComObject(pSample);
    }

    return WinAPI.E_FAIL;
} // end of SampleCB
int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
{
    // Make sure we only get called once
    if (!m_GotOne)
    {
        m_GotOne = true;

        // Copy the media sample to a member var
        m_ims = pSample;

        // Call all the tests
        TestLength();
        TestSyncPoint();
        TestPreroll();
        TestDiscontinuity();
        TestTime();
        TestMediaTime();
        TestMediaType();
        TestPointer();

        m_ims = null;

        // Set the completion event so DoTests can return
        m_TestComplete.Set();
    }

    Marshal.ReleaseComObject(pSample);
    return 0;
}
public int SampleCB(double SampleTime, IMediaSample pSample)
{
    if (pSample == null)
    {
        return -1;
    }

    var len = pSample.GetActualDataLength();
    IntPtr pbuf;

    if (pSample.GetPointer(out pbuf) == 0 && len > 0)
    {
        var buf = new byte[len];
        Marshal.Copy(pbuf, buf, 0, len);

        // NOTE: the frame size is hard-coded here and must match the
        // connected media type.
        var image = new Bitmap(640, 480);
        var at = 0;

        if (len % 3 != 0)
        {
            // Image is not bitmap 24bit, what now, burn it to the ground?
            Marshal.ReleaseComObject(pSample);
            return 0;
        }

        // Bottom-up DIB: walk the buffer as BGR triplets and mirror the
        // coordinates so the bitmap comes out the right way up.
        for (var bOff = 0; bOff < len; bOff += 3)
        {
            var color = Color.FromArgb(buf[bOff + 2], buf[bOff + 1], buf[bOff]);
            var x = (image.Width - 1) - (at % image.Width);
            var y = (image.Height - 1) - (at / image.Width % image.Height);
            image.SetPixel(x, y, color);
            at++;
        }

        lastImage?.Dispose();
        lastImage = image;
    }

    Marshal.ReleaseComObject(pSample);
    return 0;
}
/// <summary>
/// Sample callback, NOT USED.
/// </summary>
/// <param name="SampleTime">The sample time.</param>
/// <param name="pSample">The media sample.</param>
/// <returns>The result.</returns>
int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
{
    if (!_gotOneSound)
    {
        // Set _gotOneSound to prevent further calls until we
        // request a new sample.
        _gotOneSound = true;

        IntPtr pBuffer;
        pSample.GetPointer(out pBuffer);
        int iBufferLen = pSample.GetSize();

        if (iBufferLen > _soundMemorySize)
        {
            throw new Exception("Buffer is wrong size");
        }

        Directx.Utility.Devices.CopyMemory(_handleSound, pBuffer, _soundMemorySize);

        // Sound is ready.
        if (_audoReadySound != null)
        {
            _audoReadySound.Set();
        }
    }

    Marshal.ReleaseComObject(pSample);
    return 0;
}
private bool ProcessInternal<T>(IntPtr samples, short channels, int length, IMediaSample output)
    where T : struct
{
    UpdateGpuResources(length);
    var sampleCount = length / channels;

    try
    {
        var devInputSamples = GetDevInputSamples<T>(length);
        var devInputResult = GetDevNormSamples(channels, sampleCount);
        var devOutputResult = GetDevOutputSamples<T>(length);

        Gpu.CopyToDevice(samples, 0, devInputSamples, 0, length);
        Gpu.Launch(AudioProc.THREAD_COUNT, 1,
            string.Format("GetSamples{0}", typeof(T).Name), devInputSamples, devInputResult);

        Process(devInputResult, channels, sampleCount);

        output.GetPointer(out samples);
        Gpu.Launch(AudioProc.THREAD_COUNT, 1,
            string.Format("PutSamples{0}", typeof(T).Name), devInputResult, devOutputResult);
        Gpu.CopyFromDevice(devOutputResult, 0, samples, 0, length);
    }
    catch (Exception ex)
    {
        Trace.WriteLine(ex);
        return false;
    }

    return true;
}
int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
{
    int hr = base.SampleCB(SampleTime, pSample);
    Marshal.ReleaseComObject(pSample);
    return hr;
}
override public int SetTimeStamps(IMediaSample pSample)
{
    // _fps here is the per-frame duration (rtStop - rtStart), in the same
    // units as the sample times.
    DsLong rtStart = new DsLong(m_iFrameNumber * _fps);
    DsLong rtStop = new DsLong(rtStart + _fps);

    int hr = pSample.SetTime(rtStart, rtStop);
    return hr;
}
protected override void Allocate()
{
    if (m_Format == null)
    {
        throw new InvalidOperationException("Format not set, can't allocate output.");
    }

    m_Sample = new MediaSample(MediaSample);
}
public int SampleCB(double SampleTime, IMediaSample pSample)
{
    if (pSample == null)
    {
        return -1;
    }

    if (Trigger.WaitOne(0))
    {
        int len = pSample.GetActualDataLength();
        if (len > 0)
        {
            IntPtr buf;
            if (pSample.GetPointer(out buf) == 0)
            {
                // Wrap the raw sample buffer in a Bitmap, flip it the right
                // way up, and hand a copy to the preview callback.
                using (var bmp = new Bitmap(Width, Height, Stride,
                    System.Drawing.Imaging.PixelFormat.Format24bppRgb, buf))
                {
                    bmp.RotateFlip(RotateFlipType.Rotate180FlipX);
                    _previewImage = (Bitmap)bmp.Clone();
                    callback?.Invoke();
                }
            }
        }

        Trigger.Reset();
    }

    Marshal.ReleaseComObject(pSample);
    return 0;
}
int ISampleGrabberCB.SampleCB(double sampleTime, IMediaSample pSample)
{
    var mediaType = new AMMediaType();

    /* We query for the media type the sample grabber is using */
    int hr = m_sampleGrabber.GetConnectedMediaType(mediaType);

    var videoInfo = new VideoInfoHeader();

    /* 'Cast' the pointer to our managed struct */
    Marshal.PtrToStructure(mediaType.formatPtr, videoInfo);

    /* The stride is "How many bytes across for each pixel line (0 to width)" */
    int stride = Math.Abs(videoInfo.BmiHeader.Width * (videoInfo.BmiHeader.BitCount / 8 /* eight bits per byte */));
    int width = videoInfo.BmiHeader.Width;
    int height = videoInfo.BmiHeader.Height;

    if (m_videoFrame == null)
    {
        InitializeBitmapFrame(width, height);
    }

    if (m_videoFrame == null)
    {
        return 0;
    }

    BitmapData bmpData = m_videoFrame.LockBits(new Rectangle(0, 0, width, height),
        ImageLockMode.ReadWrite, PixelFormat.Format24bppRgb);

    /* Get the pointer to the pixels */
    IntPtr pBmp = bmpData.Scan0;

    IntPtr samplePtr;

    /* Get the native pointer to the sample */
    pSample.GetPointer(out samplePtr);

    int pSize = stride * height;

    /* Copy the memory from the sample pointer to our bitmap pixel pointer */
    CopyMemory(pBmp, samplePtr, pSize);

    m_videoFrame.UnlockBits(bmpData);

    InvokeNewVideoSample(new VideoSampleArgs { VideoFrame = m_videoFrame });

    DsUtils.FreeAMMediaType(mediaType);

    /* Dereference the sample COM object */
    Marshal.ReleaseComObject(pSample);
    return 0;
}
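// Several snippets here call CopyMemory without showing its declaration.
// A minimal P/Invoke sketch (the class name is illustrative; CopyMemory is
// a macro over RtlMoveMemory in the Windows headers, so that is the export
// to bind):

internal static class NativeMethods
{
    [System.Runtime.InteropServices.DllImport("kernel32.dll", EntryPoint = "RtlMoveMemory")]
    public static extern void CopyMemory(System.IntPtr destination, System.IntPtr source, int length);
}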
/// <summary>
/// Constructor (with initial values).
/// </summary>
/// <param name="sample_time">The sample time.</param>
/// <param name="sample_data">The sample data.</param>
public CxSampleGrabberEventArgs(double sample_time, IMediaSample sample_data)
{
    SampleTime = sample_time;
    SampleData = sample_data;

    if (sample_data != null)
    {
        sample_data.GetPointer(ref m_Address);
        m_Length = sample_data.GetSize();
    }
}
override public int SetTimeStamps(IMediaSample pSample, int iFrameNumber)
{
    reader.ReadTimeStamps(iFrameNumber, out startFrameTime, out endFrameTime);

    DsLong rtStart = new DsLong(startFrameTime);
    DsLong rtStop = new DsLong(endFrameTime);

    int hr = pSample.SetTime(rtStart, rtStop);
    return hr;
}
/// <summary>
/// Sample callback: reads the buffer pointer and actual data length,
/// then releases the sample.
/// </summary>
/// <param name="SampleTime">Starting time of the sample, in seconds.</param>
/// <param name="pSample">The media sample.</param>
/// <returns>0 (S_OK).</returns>
public int SampleCB(double SampleTime, IMediaSample pSample)
{
    IntPtr pBuffer;
    pSample.GetPointer(out pBuffer);
    int BufferLen = pSample.GetActualDataLength();

    Marshal.ReleaseComObject(pSample);
    return 0;
}
public MediaSample(IMediaSample sample)
{
    // Snapshot the sample's properties. The IsXxx() queries return
    // S_OK (0) for true and S_FALSE (1) for false.
    m_Size = sample.GetSize();
    m_ActualDataLength = sample.GetActualDataLength();
    m_IsSyncPoint = sample.IsSyncPoint() == 0;
    m_IsPreroll = sample.IsPreroll() == 0;
    m_IsDiscontinuity = sample.IsDiscontinuity() == 0;
    sample.GetTime(out m_TimeStart, out m_TimeEnd);
    sample.GetMediaTime(out m_MediaTimeStart, out m_MediaTimeEnd);

    // Allocate a buffer matching the sample's size.
    m_Buffer = Marshal.AllocCoTaskMem(m_Size);
}
/// <summary>
/// Implementation of ISampleGrabberCB.
/// </summary>
int ISampleGrabberCB.SampleCB(double sampleTime, IMediaSample pSample)
{
    IntPtr pBuffer;
    int hr = pSample.GetPointer(out pBuffer);
    DsError.ThrowExceptionForHR(hr);

    Analyze(sampleTime, pBuffer, pSample.GetSize());

    Marshal.ReleaseComObject(pSample);
    return 0;
}
public int SampleCB(double sampleTime, IMediaSample pSample)
{
    IntPtr ptr;
    pSample.GetPointer(out ptr);

    Execute(ptr, VideoHeight, VideoStride);

    Marshal.ReleaseComObject(pSample);
    return 0;
}
/// <summary>
/// Callback invoked when a frame capture completes.
/// </summary>
/// <param name="sample_time">The sample time.</param>
/// <param name="sample_data">The sample data.</param>
/// <returns>
/// Returns DSLab.HRESULT.S_OK, or S_FALSE if a handler requested cancellation.
/// </returns>
int ISampleGrabberCB.SampleCB(double sample_time, IMediaSample sample_data)
{
    if (this.Enable)
    {
        var args = new CxSampleGrabberEventArgs(sample_time, sample_data);

        if (this.Notify != null)
            this.Notify(this, args);

        if (args.Cancellation)
            return (int)HRESULT.S_FALSE;
    }

    return (int)HRESULT.S_OK;
}
/// <summary>
/// Calculate the timestamps based on the frame number and the frames per second.
/// </summary>
/// <param name="sample">The <see cref="IMediaSample"/> to be timed.</param>
/// <returns>0 = success, negative values for errors</returns>
public override int SetTimeStamps(IMediaSample sample)
{
    // Calculate the start/end times based on the current frame number
    // and frame rate
    DsLong start = new DsLong(this.FrameNumber * this.framesPerSecond);
    DsLong stop = new DsLong(start + this.framesPerSecond);

    // Set the times into the sample
    int hr = sample.SetTime(start, stop);
    return hr;
}
// Calculate the timestamps based on the frame number and the frames per second
override public int SetTimeStamps(IMediaSample pSample)
{
    // Calculate the start/end times based on the current frame number
    // and frame rate
    DsLong rtStart = new DsLong(m_iFrameNumber * m_FPS);
    DsLong rtStop = new DsLong(rtStart + m_FPS);

    // Set the times into the sample
    int hr = pSample.SetTime(rtStart, rtStop);
    return hr;
}
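// In the SetTimeStamps overrides above, m_FPS (and framesPerSecond) is a
// per-frame duration, not a rate: DirectShow reference times count in
// 100-nanosecond units. A sketch of how such a value is typically derived
// (the 25 fps figure is illustrative):

// 10,000,000 reference-time units (100 ns each) per second.
const long UNITS = 10000000;

// Each frame of a 25 fps stream lasts UNITS / 25 = 400,000 units (40 ms).
long m_FPS = UNITS / 25;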
public int WaitForNext(int dwTimeout, out IMediaSample ppSample, out IntPtr pdwUser)
{
    int hr;

    Debug.WriteLine(string.Format("WaitForNext {0}", dwTimeout));

    // In case something goes wrong
    ppSample = null;
    pdwUser = IntPtr.Zero;

    if (!m_GraphIsFlushing)
    {
        // Count threads we have waiting
        Interlocked.Increment(ref m_ThreadsWaiting);

        bool bWait = m_Wait.WaitOne(dwTimeout, false);

        lock (this)
        {
            // If we found one before timing out, send it back
            if (bWait && !m_GraphIsFlushing)
            {
                MediaHolder mh = m_Requests.Dequeue() as MediaHolder;

                ppSample = mh.Sample;
                pdwUser = mh.User;

                hr = PopulateMediaSample(ppSample);
            }
            else
            {
                hr = DsResults.E_Timeout;
            }

            // If there is another request, reset the event. Also
            // if the graph is flushing, allow the other threads
            // to exit
            if ((m_Requests.Count > 0) || (m_GraphIsFlushing))
            {
                m_Wait.Set();
            }
        }

        // Count threads we have waiting
        Interlocked.Decrement(ref m_ThreadsWaiting);
    }
    else
    {
        hr = DsResults.E_WrongState;
    }

    return hr;
}
int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
{
    try
    {
        throw new NotImplementedException();
    }
    catch
    {
        // Swallow the exception; the callback must not throw across the
        // COM boundary, and the sample still has to be released.
    }
    finally
    {
        Marshal.ReleaseComObject(pSample);
    }

    return 0;
}
public int SampleCB(double SampleTime, IMediaSample pSample)
{
    if (pSample == null)
        return -1;

    int len = pSample.GetActualDataLength();
    IntPtr pBuf;

    if (pSample.GetPointer(out pBuf) == 0 && len > 0)
    {
        m_imageBuf = new byte[len];
        Marshal.Copy(pBuf, m_imageBuf, 0, len);
    }

    return 0;
}
/// <summary>
/// The callback from the GSSF to populate the sample. This class isn't intended
/// to be overridden. Child classes should instead implement PopulateSample,
/// which this method calls.
/// </summary>
/// <param name="pSample">The sample to populate</param>
/// <returns>HRESULT</returns>
public int SampleCallback(IMediaSample pSample)
{
    int hr;
    IntPtr pData;

    try
    {
        // Get the buffer into which we will copy the data
        hr = pSample.GetPointer(out pData);
        if (hr >= 0)
        {
            // Find out the amount of space in the buffer
            int cbData = pSample.GetSize();

            lock (this)
            {
                hr = SetTimeStamps(pSample);
                if (hr >= 0)
                {
                    int iRead;

                    // Populate the sample
                    hr = PopulateSample(pData, cbData, out iRead);
                    if (hr >= 0)
                    {
                        if (hr == S_Ok)
                        {
                            // Increment the frame number for next time
                            m_iFrameNumber++;
                        }
                        else
                        {
                            // 1 (S_False) == end of stream; start over
                            m_iFrameNumber = 0;
                        }

                        pSample.SetActualDataLength(iRead);
                    }
                }
            }
        }
    }
    finally
    {
        // Release our pointer to the media sample. THIS IS ESSENTIAL! If
        // you don't do this, the graph will stop after about 2 samples.
        Marshal.ReleaseComObject(pSample);
    }

    return hr;
}
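// PopulateSample is the hook a child class implements to supply the frame
// bytes. A minimal sketch of such an override, assuming the base declares
// it virtual with this signature (inferred from the call above) and that a
// pre-rendered byte[][] m_Frames field exists (illustrative name):

public override int PopulateSample(IntPtr pData, int cbData, out int iRead)
{
    if (m_iFrameNumber >= m_Frames.Length)
    {
        // No more frames: 1 (S_False) signals end of stream to SampleCallback.
        iRead = 0;
        return 1;
    }

    byte[] frame = m_Frames[m_iFrameNumber];
    iRead = Math.Min(frame.Length, cbData);
    Marshal.Copy(frame, 0, pData, iRead);
    return 0; // S_Ok: more frames to come.
}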
public AudioOutput(WaveFormatExtensible format, IMediaSample mediaSample)
{
    if (format == null)
    {
        throw new ArgumentNullException("format");
    }

    if (mediaSample == null)
    {
        throw new ArgumentNullException("mediaSample");
    }

    MediaSample = mediaSample;
    Format = format;
}
public int SampleCB(double SampleTime, IMediaSample pSample)
{
    DateTime dt = DateTime.Now;

    lock (this)
    {
        sb.AppendFormat("{0:T}.{1:D3}: ", dt, dt.Millisecond);
        sb.AppendFormat("SampleTime={0:G4}, ", SampleTime);

        if (pSample != null)
        {
            long start = 0, end = 0;
            pSample.GetTime(out start, out end);
            sb.AppendFormat("Time(start={0}, end={1}), ", start, end);

            pSample.GetMediaTime(out start, out end);
            sb.AppendFormat("MediaTime(start={0}, end={1}), ", start, end);

            int len = pSample.GetActualDataLength();
            sb.AppendFormat("data length={0}, ", len);

            bool syncpoint = pSample.IsSyncPoint() == 0;
            sb.AppendFormat("keyframe={0}", syncpoint);

            if (pSample.IsDiscontinuity() == 0)
            {
                sb.Append(", Discontinuity");
            }

            if (pSample.IsPreroll() == 0)
            {
                sb.Append(", Preroll");
            }

            int n = Math.Min(len, 8);
            IntPtr pbuf;

            if (pSample.GetPointer(out pbuf) == 0)
            {
                byte[] buf = new byte[n];
                Marshal.Copy(pbuf, buf, 0, n);

                sb.Append(", Data=");
                for (int i = 0; i < n; i++)
                {
                    sb.AppendFormat("{0:X2}", buf[i]);
                }
                sb.Append("...");
            }
        }
        else
        {
            sb.Append("pSample==NULL!");
        }

        sb.Append(Environment.NewLine);
    }

    if (pSample != null)
    {
        Marshal.ReleaseComObject(pSample);
    }

    return 0;
}
int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
{
    // SampleCB was called
    m_Called |= 1;

    // Simple test to see if parms are correct
    Debug.Assert(pSample.GetActualDataLength() > 0, "SampleCB size");

    // Set the completion event so DoTests can return
    m_TestComplete.Set();

    Marshal.ReleaseComObject(pSample);
    return 0;
}
public int SampleCB(double SampleTime, IMediaSample pSample)
{
    if (pSample == null)
        return -1;

    int len = pSample.GetActualDataLength();
    IntPtr pbuf;

    if (pSample.GetPointer(out pbuf) == 0 && len > 0)
    {
        byte[] buf = new byte[len];
        Marshal.Copy(pbuf, buf, 0, len);

        // Invert every second byte of the buffer, then write it back in place.
        for (int i = 0; i < len; i += 2)
            buf[i] = (byte)(255 - buf[i]);

        Marshal.Copy(buf, 0, pbuf, len);
    }

    return 0;
}
/// <summary>
/// Called by the GenericSampleSourceFilter. This routine populates the MediaSample.
/// </summary>
/// <param name="sample">Pointer to a sample</param>
/// <returns>0 = success, 1 = end of stream, negative values for errors</returns>
public virtual int SampleCallback(IMediaSample sample)
{
    int hr;
    IntPtr dataPointer;

    try
    {
        // Get the buffer into which we will copy the data
        hr = sample.GetPointer(out dataPointer);
        if (hr >= 0)
        {
            // Set TRUE on every sample for uncompressed frames
            hr = sample.SetSyncPoint(true);
            if (hr >= 0)
            {
                // Find out the amount of space in the buffer
                int callbackData = sample.GetSize();

                hr = this.SetTimeStamps(sample);
                if (hr >= 0)
                {
                    int read;

                    // Copy the data into the sample (1 == end of stream)
                    hr = this.GetImage(this.frameNumber, dataPointer, callbackData, out read);
                    if (hr == 0)
                    {
                        sample.SetActualDataLength(read);

                        // Increment the frame number for next time
                        this.frameNumber++;
                    }
                }
            }
        }
    }
    finally
    {
        // Release our pointer to the media sample. THIS IS ESSENTIAL! If
        // you don't do this, the graph will stop after about 2 samples.
        Marshal.ReleaseComObject(sample);
    }

    return hr;
}
public static void CopySample(IMediaSample src, IMediaSample dest, bool copySamples)
{
    var sourceSize = src.GetActualDataLength();

    if (copySamples)
    {
        IntPtr sourceBuffer;
        src.GetPointer(out sourceBuffer);

        IntPtr destBuffer;
        dest.GetPointer(out destBuffer);

        CopyMemory(destBuffer, sourceBuffer, sourceSize);
    }

    // Copy the sample times
    long start, end;

    if (src.GetTime(out start, out end) == S_OK)
    {
        dest.SetTime(start, end);
    }

    if (src.GetMediaTime(out start, out end) == S_OK)
    {
        dest.SetMediaTime(start, end);
    }

    // Copy the media type
    AMMediaType mediaType;
    src.GetMediaType(out mediaType);
    dest.SetMediaType(mediaType);
    DsUtils.FreeAMMediaType(mediaType);

    dest.SetSyncPoint(src.IsSyncPoint() == S_OK);
    dest.SetPreroll(src.IsPreroll() == S_OK);
    dest.SetDiscontinuity(src.IsDiscontinuity() == S_OK);

    // Copy the actual data length
    dest.SetActualDataLength(sourceSize);
}
int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
{
    throw new NotImplementedException();
}
/// <summary> sample callback, NOT USED. </summary>
int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
{
    Trace.WriteLine("!!CB: ISampleGrabberCB.SampleCB");
    return 0;
}
int IMemAllocator.GetBuffer(out IMediaSample ppBuffer, long pStartTime, long pEndTime, AMGBF dwFlags)
{
    throw new NotImplementedException();
}
/// <summary>
/// Callback invoked when a frame capture completes.
/// </summary>
/// <param name="sample_time">The timestamp.</param>
/// <param name="pSample">The sample data.</param>
/// <returns>
/// Returns DSLab.HRESULT.S_OK.
/// </returns>
int ISampleGrabberCB.SampleCB(double sample_time, IMediaSample pSample)
{
    return (int)DSLab.HRESULT.S_OK;
}
int IAsyncReader.Request(IMediaSample pSample, IntPtr dwUser)
{
    throw new NotImplementedException();
}
public bool Transcode(TranscodeInfo info, MediaPortal.Core.Transcoding.VideoFormat format,
                      MediaPortal.Core.Transcoding.Quality quality, Standard standard)
{
    if (!Supports(format)) return false;

    string ext = System.IO.Path.GetExtension(info.file);
    if (ext.ToLowerInvariant() != ".ts" && ext.ToLowerInvariant() != ".mpg")
    {
        Log.Info("TSReader2MP4: wrong file format");
        return false;
    }

    try
    {
        graphBuilder = (IGraphBuilder)new FilterGraph();
        _rotEntry = new DsROTEntry((IFilterGraph)graphBuilder);

        Log.Info("TSReader2MP4: add filesource");
        TsReader reader = new TsReader();
        tsreaderSource = (IBaseFilter)reader;
        IBaseFilter filter = (IBaseFilter)tsreaderSource;
        graphBuilder.AddFilter(filter, "TSReader Source");

        IFileSourceFilter fileSource = (IFileSourceFilter)tsreaderSource;
        Log.Info("TSReader2MP4: load file:{0}", info.file);
        int hr = fileSource.Load(info.file, null);

        // Add audio/video codecs
        string strVideoCodec = "";
        string strH264VideoCodec = "";
        string strAudioCodec = "";
        string strAACAudioCodec = "";
        using (MediaPortal.Profile.Settings xmlreader = new MediaPortal.Profile.MPSettings())
        {
            strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
            strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
            strAACAudioCodec = xmlreader.GetValueAsString("mytv", "aacaudiocodec", "");
            strH264VideoCodec = xmlreader.GetValueAsString("mytv", "h264videocodec", "");
        }

        // Find the type of decoder required for the output video & audio pins on TSReader.
        Log.Info("TSReader2MP4: find tsreader compatible audio/video decoders");
        IPin pinOut0, pinOut1;
        IPin pinIn0, pinIn1;
        pinOut0 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 0); // audio
        pinOut1 = DsFindPin.ByDirection((IBaseFilter)tsreaderSource, PinDirection.Output, 1); // video
        if (pinOut0 == null || pinOut1 == null)
        {
            Log.Error("TSReader2MP4: FAILED: unable to get output pins of tsreader");
            Cleanup();
            return false;
        }

        bool usingAAC = false;
        IEnumMediaTypes enumMediaTypes;
        hr = pinOut0.EnumMediaTypes(out enumMediaTypes);
        while (true)
        {
            AMMediaType[] mediaTypes = new AMMediaType[1];
            int typesFetched;
            hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
            if (hr != 0 || typesFetched == 0) break;
            if (mediaTypes[0].majorType == MediaType.Audio &&
                mediaTypes[0].subType == MediaSubType.LATMAAC)
            {
                Log.Info("TSReader2MP4: found LATM AAC audio out pin on tsreader");
                usingAAC = true;
            }
        }

        bool usingH264 = false;
        hr = pinOut1.EnumMediaTypes(out enumMediaTypes);
        while (true)
        {
            AMMediaType[] mediaTypes = new AMMediaType[1];
            int typesFetched;
            hr = enumMediaTypes.Next(1, mediaTypes, out typesFetched);
            if (hr != 0 || typesFetched == 0) break;
            if (mediaTypes[0].majorType == MediaType.Video &&
                mediaTypes[0].subType == AVC1)
            {
                Log.Info("TSReader2MP4: found H.264 video out pin on tsreader");
                usingH264 = true;
            }
        }

        // Add the type of decoder required for the output video & audio pins on TSReader.
        Log.Info("TSReader2MP4: add audio/video decoders to graph");
        if (usingH264 == false)
        {
            Log.Info("TSReader2MP4: add mpeg2 video decoder:{0}", strVideoCodec);
            VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
            if (VideoCodec == null)
            {
                Log.Error("TSReader2MP4: unable to add mpeg2 video decoder");
                Cleanup();
                return false;
            }
        }
        else
        {
            Log.Info("TSReader2MP4: add h264 video codec:{0}", strH264VideoCodec);
            VideoCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strH264VideoCodec);
            if (VideoCodec == null)
            {
                Log.Error("TSReader2MP4: FAILED: unable to add h264 video codec");
                Cleanup();
                return false;
            }
        }

        if (usingAAC == false)
        {
            Log.Info("TSReader2MP4: add mpeg2 audio codec:{0}", strAudioCodec);
            AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
            if (AudioCodec == null)
            {
                Log.Error("TSReader2MP4: FAILED: unable to add mpeg2 audio codec");
                Cleanup();
                return false;
            }
        }
        else
        {
            Log.Info("TSReader2MP4: add aac audio codec:{0}", strAACAudioCodec);
            AudioCodec = DirectShowUtil.AddFilterToGraph(graphBuilder, strAACAudioCodec);
            if (AudioCodec == null)
            {
                Log.Error("TSReader2MP4: FAILED: unable to add aac audio codec");
                Cleanup();
                return false;
            }
        }

        Log.Info("TSReader2MP4: connect tsreader->audio/video decoders");
        // Connect output #0 (audio) of tsreader -> audio decoder input pin 0
        // Connect output #1 (video) of tsreader -> video decoder input pin 0
        pinIn0 = DsFindPin.ByDirection(AudioCodec, PinDirection.Input, 0); // audio
        pinIn1 = DsFindPin.ByDirection(VideoCodec, PinDirection.Input, 0); // video
        if (pinIn0 == null || pinIn1 == null)
        {
            Log.Error("TSReader2MP4: FAILED: unable to get pins of video/audio codecs");
            Cleanup();
            return false;
        }

        hr = graphBuilder.Connect(pinOut0, pinIn0);
        if (hr != 0)
        {
            Log.Error("TSReader2MP4: FAILED: unable to connect audio pins :0x{0:X}", hr);
            Cleanup();
            return false;
        }

        hr = graphBuilder.Connect(pinOut1, pinIn1);
        if (hr != 0)
        {
            Log.Error("TSReader2MP4: FAILED: unable to connect video pins :0x{0:X}", hr);
            Cleanup();
            return false;
        }

        // Add encoders, muxer & filewriter
        if (!AddCodecs(graphBuilder, info)) return false;

        // Setup graph controls
        mediaControl = graphBuilder as IMediaControl;
        mediaSeeking = tsreaderSource as IMediaSeeking;
        mediaEvt = graphBuilder as IMediaEventEx;
        mediaPos = graphBuilder as IMediaPosition;

        // Get file duration
        Log.Info("TSReader2MP4: Get duration of recording");
        long lTime = 5 * 60 * 60;
        lTime *= 10000000;
        long pStop = 0;
        hr = mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                       new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
        if (hr == 0)
        {
            long lStreamPos;
            mediaSeeking.GetCurrentPosition(out lStreamPos); // stream position
            m_dDuration = lStreamPos;
            lTime = 0;
            mediaSeeking.SetPositions(new DsLong(lTime), AMSeekingSeekingFlags.AbsolutePositioning,
                                      new DsLong(pStop), AMSeekingSeekingFlags.NoPositioning);
        }

        double duration = m_dDuration / 10000000d;
        Log.Info("TSReader2MP4: recording duration: {0}", MediaPortal.Util.Utils.SecondsToHMSString((int)duration));

        // Run the graph to initialize the filters, to be sure
        hr = mediaControl.Run();
        if (hr != 0)
        {
            Log.Error("TSReader2MP4: FAILED: unable to start graph :0x{0:X}", hr);
            Cleanup();
            return false;
        }

        int maxCount = 20;
        while (true)
        {
            long lCurrent;
            mediaSeeking.GetCurrentPosition(out lCurrent);
            double dpos = (double)lCurrent;
            dpos /= 10000000d;
            System.Threading.Thread.Sleep(100);
            if (dpos >= 2.0d) break;
            maxCount--;
            if (maxCount <= 0) break;
        }

        mediaControl.Stop();
        FilterState state;
        mediaControl.GetState(500, out state);

        GC.Collect();
        GC.Collect();
        GC.Collect();
        GC.WaitForPendingFinalizers();

        graphBuilder.RemoveFilter(mp4Muxer);
        graphBuilder.RemoveFilter(h264Encoder);
        graphBuilder.RemoveFilter(aacEncoder);
        graphBuilder.RemoveFilter((IBaseFilter)fileWriterFilter);
        if (!AddCodecs(graphBuilder, info)) return false;

        // Set encoder quality & muxer settings
        if (!EncoderSet(graphBuilder, info)) return false;

        // Start transcoding - run the graph
        Log.Info("TSReader2MP4: start transcoding");

        // Setup flow control.
        // Need to leverage CBasePin, CPullPin & IAsyncReader methods.
        // NOTE: synchVideo is null here, so this call throws and control
        // moves to the catch block below.
        IAsyncReader synchVideo = null;
        mediaSample = VideoCodec as IMediaSample;
        hr = synchVideo.SyncReadAligned(mediaSample);

        // So we only parse decoder output when the encoders are ready.
        hr = mediaControl.Run();
        if (hr != 0)
        {
            Log.Error("TSReader2MP4: FAILED: unable to start graph :0x{0:X}", hr);
            Cleanup();
            return false;
        }
    }
    catch (Exception ex)
    {
        Log.Error("TSReader2MP4: Unable to create graph: {0}", ex.Message);
        Cleanup();
        return false;
    }

    return true;
}
/// <summary>
/// Required public callback from DirectShow SampleGrabber. Do not call this method.
/// </summary>
public int SampleCB(double SampleTime, IMediaSample pSample)
{
    // Return S_OK
    return 0;
}
// ISampleGrabberCB methods
public int SampleCB(double SampleTime, IMediaSample pSample)
{
    Marshal.ReleaseComObject(pSample);
    return 0;
}
public int SampleCB(double SampleTime, IMediaSample pSample)
{
    throw new NotImplementedException();
}
/// <summary> sample callback, NOT USED. </summary>
int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
{
    return 0;
}
/// <summary>
/// Receives a pointer to the media sample.
/// </summary>
/// <param name="sampleTime">Starting time of the sample, in seconds.</param>
/// <param name="pSample">Pointer to the IMediaSample interface of the sample.</param>
/// <returns>0 (S_OK).</returns>
public int SampleCB(double sampleTime, IMediaSample pSample)
{
    return 0;
}
int IAsyncReader.WaitForNext(int dwTimeout, out IMediaSample ppSample, out IntPtr pdwUser)
{
    throw new NotImplementedException();
}
int IMemAllocator.ReleaseBuffer(IMediaSample pBuffer)
{
    throw new NotImplementedException();
}
/// <summary>
/// Set the timestamps on samples. It is not always required that media samples have
/// timestamps, so this default implementation does not set one. To set a timestamp
/// on the samples, override this method in a child class.
/// </summary>
/// <param name="pSample">Pointer to the sample interface</param>
/// <returns>HRESULT</returns>
public virtual int SetTimeStamps(IMediaSample pSample)
{
    return S_Ok;
}
/// <summary>
/// Called by the GenericSampleSourceFilter. This routine populates the MediaSample.
/// </summary>
/// <param name="pSample">Pointer to a sample</param>
/// <returns>0 = success, 1 = end of stream, negative values for errors</returns>
public virtual int SampleCallback(IMediaSample pSample)
{
    int hr;
    IntPtr pData;

    try
    {
        // Get the buffer into which we will copy the data
        hr = pSample.GetPointer(out pData);
        if (hr >= 0)
        {
            // Set TRUE on every sample for uncompressed frames
            hr = pSample.SetSyncPoint(true);
            if (hr >= 0)
            {
                // Find out the amount of space in the buffer
                int cbData = pSample.GetSize();

                hr = SetTimeStamps(pSample);
                if (hr >= 0)
                {
                    int iRead;

                    // Copy the data into the sample (1 == end of stream)
                    hr = GetImage(m_iFrameNumber, pData, cbData, out iRead);
                    if (hr == 0)
                    {
                        pSample.SetActualDataLength(iRead);

                        // Increment the frame number for next time
                        m_iFrameNumber++;
                    }
                }
            }
        }
    }
    finally
    {
        // Release our pointer to the media sample. THIS IS ESSENTIAL! If
        // you don't do this, the graph will stop after about 2 samples.
        Marshal.ReleaseComObject(pSample);
    }

    return hr;
}
int IAsyncReader.SyncReadAligned(IMediaSample pSample)
{
    throw new NotImplementedException();
}
/// <summary>
/// Calculate and populate the timestamps
/// </summary>
/// <param name="pSample">The IMediaSample to set the timestamps on</param>
/// <returns>HRESULT</returns>
public override int SetTimeStamps(IMediaSample pSample)
{
    // Time per frame
    int tpf = (UNIT / m_Fps);

    DsLong rtStart = new DsLong(m_rtSampleTime);
    m_rtSampleTime += tpf;
    DsLong rtStop = new DsLong(m_rtSampleTime);

    // Set the times into the sample
    int hr = pSample.SetTime(rtStart, rtStop);

    // Set TRUE on every sample for uncompressed frames
    if (hr >= 0)
    {
        hr = pSample.SetSyncPoint(true);
    }

    return hr;
}
int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
{
    Marshal.ReleaseComObject(pSample);
    return 0;
}
int ISampleGrabberCB.SampleCB(double SampleTime, IMediaSample pSample)
{
    Trace.Write("Sample");
    return 0;
}
// Set the timestamps
public override int SetTimeStamps(IMediaSample pSample)
{
    int hr;
    int iRead = pSample.GetActualDataLength();
    long SampleTime = GetTimeFromBytes(iRead);

    long TimeStart = m_FSampleTime;
    m_FSampleTime = m_FSampleTime + SampleTime;
    long TimeStop = m_FSampleTime;

    hr = pSample.SetTime(TimeStart, TimeStop);

    if (hr >= 0)
    {
        TimeStart = m_FMediaTime;
        m_FMediaTime = m_FMediaTime + SampleTime;
        TimeStop = m_FMediaTime;

        hr = pSample.SetMediaTime(TimeStart, TimeStop);
        DsError.ThrowExceptionForHR(hr);
    }

    return hr;
}
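// GetTimeFromBytes above converts a byte count into a reference-time
// duration. For PCM audio this needs only the stream's average byte rate;
// a plausible sketch (m_BytesPerSecond standing in for the WAVEFORMATEX
// nAvgBytesPerSec value; the field name is illustrative):

private long GetTimeFromBytes(int byteCount)
{
    // 10,000,000 reference-time units (100 ns each) per second.
    return (long)byteCount * 10000000 / m_BytesPerSecond;
}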