/// <summary>
/// Pushes one frame of sound data into the output device buffer.
/// </summary>
/// <param name="info">Frame timing/state information (must not be null).</param>
/// <param name="frame">Sound frame whose samples are copied out (must not be null).</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="info"/> or <paramref name="frame"/> is null.</exception>
public void PushFrame(IFrameInfo info, IFrameSound frame)
{
    if (info == null)
    {
        throw new ArgumentNullException(nameof(info));
    }
    if (frame == null)
    {
        throw new ArgumentNullException(nameof(frame));
    }
    // In synchronized mode, block until the device is ready for the next frame.
    if (IsSynchronized)
    {
        WaitFrame();
    }
    var buffer = LockBuffer();
    if (buffer == null)
    {
        // No buffer available (device not ready) - drop this frame.
        return;
    }
    var srcBuffer = frame.GetBuffer();
    // Copy no more than either buffer holds. The original copied
    // buffer.Length unconditionally, which throws ArgumentException
    // when the source frame is shorter than the destination buffer.
    Array.Copy(srcBuffer, buffer, Math.Min(srcBuffer.Length, buffer.Length));
    UnlockBuffer(buffer);
}
/// <summary>
/// Pushes one frame of sound data into the output device buffer using a
/// native memory copy (unsafe fast path counterpart of the managed version).
/// </summary>
/// <param name="info">Frame timing/state information (must not be null).</param>
/// <param name="frame">Sound frame whose samples are copied out (must not be null).</param>
public void PushFrame(IFrameInfo info, IFrameSound frame)
{
    if (info == null)
    {
        throw new ArgumentNullException("info");
    }
    if (frame == null)
    {
        throw new ArgumentNullException("frame");
    }
    // In synchronized mode, block until the device is ready for the next frame.
    if (IsSynchronized)
    {
        WaitFrame();
    }
    var buffer = LockBuffer();
    if (buffer == null)
    {
        // No buffer available (device not ready) - drop this frame.
        return;
    }
    var srcBuffer = frame.GetBuffer();
    // NOTE(review): copies buffer.Length BYTES from a uint[] source into a
    // byte[] destination; assumes srcBuffer holds at least buffer.Length
    // bytes (i.e. srcBuffer.Length * 4 >= buffer.Length) - TODO confirm
    // against the sound frame contract, otherwise this reads past the end.
    fixed (uint* pSrc = srcBuffer)
    fixed (byte* pbDst = buffer)
    {
        NativeMethods.CopyMemory((uint*)pbDst, pSrc, buffer.Length);
    }
    UnlockBuffer(buffer);
}
/// <summary>
/// Queues a snapshot of the current video and sound frame for the
/// recorder thread (video goes to the MP4 queue, audio to the WAV queue).
/// </summary>
public void PushFrame(
    IFrameInfo infoFrame,
    IFrameVideo videoFrame,
    IFrameSound soundFrame)
{
    // Recorder thread is not running - nothing to record.
    if (_threadRecord == null)
    {
        return;
    }
    // Copy the video image into an independent MediaFrame so the
    // emulator can keep reusing its own buffers.
    var width = videoFrame.Size.Width;
    var height = videoFrame.Size.Height;
    var mediaFrame = new MediaFrame
    {
        Width = width,
        Height = height,
        Ratio = videoFrame.Ratio,
        Image = new int[width * height],
        SampleRate = _sampleRate,
    };
    Array.Copy(videoFrame.Buffer, mediaFrame.Image, mediaFrame.Image.Length);
    _queueMP4.Enqueue(mediaFrame);
    // Snapshot the audio samples for the WAV writer.
    var srcSamples = soundFrame.GetBuffer();
    var sampleCopy = new uint[srcSamples.Length];
    Array.Copy(srcSamples, sampleCopy, srcSamples.Length);
    _queueWAV.Enqueue(sampleCopy);
    // Wake the recorder thread.
    _eventFrame.Set();
}
/// <summary>
/// Queues a snapshot of the current sound frame for the recorder thread.
/// </summary>
public void PushFrame(IFrameInfo info, IFrameVideo videoFrame, IFrameSound soundFrame)
{
    // Recorder thread is not running - nothing to record.
    if (_threadRecord == null)
    {
        return;
    }
    // Copy the samples so the caller may reuse its buffer immediately.
    var samples = soundFrame.GetBuffer();
    var snapshot = new uint[samples.Length];
    Array.Copy(samples, snapshot, samples.Length);
    _queue.Enqueue(snapshot);
    // Wake the recorder thread.
    _eventFrame.Set();
}
/// <summary>
/// Renders one video frame, optionally pacing the caller against the
/// frame resampler when synchronization is enabled.
/// </summary>
/// <param name="info">Frame timing/state information (must not be null).</param>
/// <param name="frame">Video frame to render (must not be null).</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="info"/> or <paramref name="frame"/> is null.</exception>
public void PushFrame(IFrameInfo info, IFrameVideo frame)
{
    // Fail fast on both arguments; the original only checked frame
    // while dereferencing info unconditionally below.
    if (info == null)
    {
        throw new ArgumentNullException(nameof(info));
    }
    if (frame == null)
    {
        throw new ArgumentNullException(nameof(frame));
    }
    if (_frameResampler.SourceRate > 0 && IsSynchronized && !info.IsRefresh)
    {
        // Wait until the resampler accepts the next frame; abort
        // immediately when the cancel event is signaled.
        do
        {
            var waitEvents = new[] { _frameEvent, _cancelEvent };
            if (WaitHandle.WaitAny(waitEvents) != 0)
            {
                return;
            }
        }
        while (!_frameResampler.Next());
    }
    _osdLayer.FrameStartTact = info.StartTact;
    _osdLayer.SampleRate = info.SampleRate;
    if (!info.IsRefresh)
    {
        _osdLayer.UpdateFrame(info.UpdateTime);
    }
    // Height is scaled by the pixel aspect ratio (rounded to nearest).
    FrameSize = new Size(
        frame.Size.Width,
        (int)(frame.Size.Height * frame.Ratio + 0.5F));
    _videoLayer.Update(frame);
    _iconLayer.Update(info.Icons);
    // Slow GDI fallback - only when hardware rendering is not active.
    if (!_allocator.IsRendering)
    {
        UpdateGdi(frame);
    }
}
/// <summary>
/// Dispatches one emulation frame to the video/sound sinks and, when
/// configured, to the time synchronizer and media recorder.
/// </summary>
public void PushFrame(
    IFrameInfo infoFrame,
    IFrameVideo videoFrame,
    IFrameSound soundFrame)
{
    // Snapshot the sinks once so a concurrent reconfiguration cannot
    // swap them out mid-frame.
    var timeSync = m_timeSync;
    var soundSink = m_sound;
    var videoSink = m_video;
    if (infoFrame.IsRefresh)
    {
        // UI-initiated refresh: redraw video only - no sound, no sync.
        if (videoSink != null && videoFrame != null)
        {
            videoSink.PushFrame(infoFrame, videoFrame);
        }
        return;
    }
    if (SyncSource == SyncSource.Time && timeSync != null)
    {
        timeSync.WaitFrame();
    }
    if (videoSink != null && videoFrame != null)
    {
        videoSink.PushFrame(infoFrame, videoFrame);
    }
    if (soundSink != null && soundFrame != null)
    {
        soundSink.PushFrame(infoFrame, soundFrame);
    }
    // Recorder needs both streams; skip when either is missing.
    var recorder = MediaRecorder;
    if (recorder != null && videoFrame != null && soundFrame != null)
    {
        recorder.PushFrame(infoFrame, videoFrame, soundFrame);
    }
}
/// <summary>
/// Queues a snapshot of the current video frame (tagged with the sound
/// frame's sample rate) for the recorder thread.
/// </summary>
public void PushFrame(
    IFrameInfo infoFrame,
    IFrameVideo videoFrame,
    IFrameSound soundFrame)
{
    // Recorder thread is not running - nothing to record.
    if (_threadRecord == null)
    {
        return;
    }
    // Copy the video image into an independent MediaFrame so the
    // emulator can keep reusing its own buffers.
    var width = videoFrame.Size.Width;
    var height = videoFrame.Size.Height;
    var mediaFrame = new MediaFrame
    {
        Width = width,
        Height = height,
        Ratio = videoFrame.Ratio,
        Image = new int[width * height],
        SampleRate = soundFrame.SampleRate,
    };
    Array.Copy(videoFrame.Buffer, mediaFrame.Image, mediaFrame.Image.Length);
    _queue.Enqueue(mediaFrame);
    // Wake the recorder thread.
    _eventFrame.Set();
}
/// <summary>
/// Pushes one video frame to the renderer: converts the frame's pixel
/// data from BGRA to RGBA byte order (XNA4 dropped BGRA support) and
/// uploads the result to the current output texture.
/// </summary>
/// <param name="info">Frame timing/state information.</param>
/// <param name="frame">Video frame to render (must not be null).</param>
public void PushFrame(IFrameInfo info, IFrameVideo frame)
{
    if (frame == null)
    {
        throw new ArgumentNullException("frame");
    }
    if (!info.IsRefresh)
    {
        m_graphUpdate.PushPeriod();
    }
    if (IsSynchronized && !info.IsRefresh)
    {
        WaitFrame();
    }
    m_debugFrameStart = info.StartTact;
    m_videoData = frame;
    var videoLen = m_videoData.Size.Width * m_videoData.Size.Height;
    // we need to translate bgra colors to rgba
    // because brga color support was removed from XNA4
    // Grow-only scratch buffer, reused across frames.
    if (m_translateBuffer == null || m_translateBuffer.Length < videoLen)
    {
        m_translateBuffer = new int[videoLen];
    }
    // Copy the source pixels into the scratch buffer via its pinned address.
    fixed (int* pBuffer = m_videoData.Buffer)
    {
        Marshal.Copy(
            (IntPtr)pBuffer,
            m_translateBuffer,
            0,
            videoLen);
    }
    fixed (int* pBuffer = m_translateBuffer)
    {
        var puBuffer = (uint*)pBuffer;
        // bgra -> rgba: swap the low byte with bits 16-23 in place,
        // keeping the other two channels (mask 0xFF00FF00) untouched.
        for (var i = 0; i < videoLen; i++)
        {
            puBuffer[i] =
                (puBuffer[i] & 0x000000ff) << 16 |
                (puBuffer[i] & 0xFF00FF00) |
                (puBuffer[i] & 0x00FF0000) >> 16;
        }
    }
    // copy translated image to output texture
    // Lock guards against the texture array being swapped/disposed
    // by the render thread while we upload.
    lock (m_syncTexture)
    {
        var texture = m_texture[m_textureIndex];
        if (texture == null)
        {
            // Texture not allocated yet - drop this frame.
            return;
        }
        texture.SetData<int>(
            m_translateBuffer,
            0,
            videoLen);
    }
}
/// <summary>
/// Creates a sample grabber callback bound to the given frame info.
/// </summary>
/// <param name="frameInfo">Frame info object that receives grabbed samples.</param>
/// <param name="dispatchOnWorkerThread">True to dispatch callbacks on the worker thread.</param>
public SampleGrabberCB(IFrameInfo frameInfo, bool dispatchOnWorkerThread)
{
    _dispatchOnWorkerThread = dispatchOnWorkerThread;
    _frameInfo = frameInfo;
}
/// <summary>
/// Updates the scene state for the current frame; called once per frame
/// by the rendering loop.
/// </summary>
/// <param name="frameInfo">Timing and state information for the frame being rendered.</param>
public abstract void Update(IFrameInfo frameInfo);
/// <summary>
/// Updates the scene every frame.
/// </summary>
/// <param name="frameInfo">The frame info.</param>
/// <remarks>
/// Intentionally a no-op: this scene has no per-frame state to update.
/// </remarks>
public override void Update(IFrameInfo frameInfo)
{
}
/// <summary>
/// Capture worker thread body: builds a DirectShow filter graph
/// (source -> sample grabber), runs it until the stop event is signaled,
/// then stops the graph. The <paramref name="start"/> argument is a
/// Func&lt;IFrameInfo&gt; factory invoked on this thread.
/// </summary>
private void WorkerThread(object start)
{
    if (BehaviorVector[(int)BehaviorVectorMask.DispatchOnWorkerThread])
    {
        _dispatchOnWorkerThread = true;
    }
    // Create the frame info on the worker thread via the supplied factory.
    FrameInfo = ((Func<IFrameInfo>)start)();
    var sampleGrabberCB = new SampleGrabberCB(FrameInfo, _dispatchOnWorkerThread);
    OnEnterWorkerThread();
    try
    {
        // NOTE(review): the COM objects created below (graph, filters,
        // grabber) are never explicitly released - presumably left to the
        // RCW finalizers; verify this does not leak across restarts.
        var graph = (Activator.CreateInstance(Type.GetTypeFromCLSID(Registry.FilterGraph)) as IGraphBuilder);
        var sourceObject = FilterInfo.CreateFilter(_device.Moniker);
        var sampleGrabber = (Activator.CreateInstance(Type.GetTypeFromCLSID(Registry.SampleGrabber)) as ISampleGrabber);
        var grabberObject = (sampleGrabber as IBaseFilter);
        graph.AddFilter(sourceObject, "source");
        graph.AddFilter(grabberObject, "grabber");
        // set media type for our grabber
        using (var mediaType = new AMMediaType())
        {
            mediaType.MajorType = Registry.MediaTypes.Video;
            mediaType.SubType = Registry.MediaSubTypes.RGB32;
            sampleGrabber.SetMediaType(mediaType);
            // and then connect device filter to out pin and grabber to in
            // pin. then get capabilities of video received (this stuff
            // comes from your web camera manufacturer)
            if (graph.Connect(sourceObject.GetPin(PinDirection.Output, 0), grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
            {
                // S_OK (0) means the negotiated format is available.
                if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    _frameInfo.SetProperties((VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader)));
                }
            }
            // out pin to grabber without buffering and callback to grabber
            // object (this one will get all images from our source).
            graph.Render(grabberObject.GetPin(PinDirection.Output, 0));
            sampleGrabber.SetBufferSamples(false);
            sampleGrabber.SetOneShot(false);
            // 1 = BufferCB callback mode.
            sampleGrabber.SetCallback(sampleGrabberCB, 1);
            // dump output window - suppress the default video renderer window
            var wnd = (IVideoWindow)graph;
            wnd.put_AutoShow(false);
            wnd = null;
            // and run the controller
            var control = (IMediaControl)graph;
            control.Run();
            if (_dispatchOnWorkerThread)
            {
                // Pump the dispatcher between stop-event polls so queued
                // callbacks execute on this thread.
                Dispatcher.Run();
                while (!_stopEvent.WaitOne(10, true))
                {
                    Dispatcher.Run();
                }
                Dispatcher.ExitAllFrames();
            }
            else
            {
                // No dispatching needed - just poll for the stop signal.
                while (!_stopEvent.WaitOne(0, true))
                {
                    Thread.Sleep(10);
                }
            }
            control.StopWhenReady();
        }
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex);
        throw;
    }
    finally
    {
        // guaranteed exit
        OnExitWorkerThread();
    }
}