/// <summary>
/// Copies the current video frame's pixels into the D3D texture.
/// No-op when the texture has not been created yet.
/// </summary>
/// <param name="videoData">Source frame whose Buffer is copied row-by-row.</param>
private void UpdateTextureData(IFrameVideo videoData)
{
    if (_texture0 == null)
    {
        return;
    }
    // Lock mip level 0; the finally block guarantees the texture is
    // unlocked even when the native copy throws.
    var lockRect = _texture0.LockRectangle(0, D3DLOCK.NONE);
    try
    {
        unsafe
        {
            fixed (int* pSource = videoData.Buffer)
            {
                NativeMethods.CopyStride(
                    (int*)lockRect.pBits,
                    pSource,
                    _frameSize.Width,
                    _frameSize.Height,
                    _textureStride);
            }
        }
    }
    finally
    {
        _texture0.UnlockRectangle(0);
    }
}
/// <summary>
/// Renders the most recently produced video frame: pulls a fresh frame from
/// the show queue (recycling the previous one back to the producer), resizes
/// the texture when the frame geometry changed, uploads the pixels and draws
/// the sprite (plus the TV-mask overlay when MimicTv is enabled).
/// </summary>
/// <param name="width">Target surface width in pixels.</param>
/// <param name="height">Target surface height in pixels.</param>
protected override void RenderSynchronized(int width, int height)
{
    base.RenderSynchronized(width, height);
    IFrameVideo freshFrame;
    if (_showQueue.TryDequeue(out freshFrame))
    {
        // Hand the previously shown frame back so the producer can reuse it.
        if (_lastVideoData != null)
        {
            _updateQueue.Enqueue(_lastVideoData);
        }
        _lastVideoData = freshFrame;
    }
    var frame = _lastVideoData;
    if (frame == null)
    {
        return;
    }
    if (_texture0 == null || _frameSize != frame.Size)
    {
        UpdateTextureSize(frame.Size);
    }
    _frameSizeNormalized = new SizeF(
        _frameSize.Width,
        _frameSize.Height * frame.Ratio);
    UpdateTextureData(frame);
    var dstRect = ScaleHelper.GetDestinationRect(
        ScaleMode,
        new Size(width, height),
        _frameSizeNormalized);
    RenderSprite(_sprite, _texture0, _frameSize, dstRect, AntiAlias);
    if (!MimicTv)
    {
        return;
    }
    var tvMaskSize = new Size(
        _frameSize.Width,
        (int)(_frameSize.Height * MimicTvRatio + 0.5F));
    RenderSprite(_spriteTv, _textureMaskTv, tvMaskSize, dstRect, true);
}
/// <summary>
/// Accepts a new frame from the producer: takes a recycled frame from the
/// update queue (reallocating when size or ratio differs), copies the pixel
/// data into it, applies the optional no-flick filter and queues it for
/// display. The frame is dropped when no recycled buffer is available.
/// </summary>
/// <param name="videoData">Source frame to copy and enqueue.</param>
public void Update(IFrameVideo videoData)
{
    IFrameVideo target;
    if (!_updateQueue.TryDequeue(out target))
    {
        // No buffer available for recycling - skip this frame.
        return;
    }
    var geometryChanged = target.Size != videoData.Size
        || target.Ratio != videoData.Ratio;
    if (geometryChanged)
    {
        target = new FrameVideo(videoData.Size, videoData.Ratio);
    }
    Array.Copy(videoData.Buffer, target.Buffer, target.Buffer.Length);
    if (VideoFilter == VideoFilter.NoFlick)
    {
        FilterNoFlick(target.Buffer, target.Size.Width, target.Size.Height);
    }
    _showQueue.Enqueue(target);
}
/// <summary>
/// Queues one video frame for MP4 encoding and a copy of its audio samples
/// for WAV output, then wakes the recording thread.
/// No-op when recording is inactive.
/// </summary>
/// <param name="infoFrame">Frame metadata (unused here).</param>
/// <param name="videoFrame">Video frame whose pixels are snapshotted.</param>
/// <param name="soundFrame">Sound frame whose samples are snapshotted.</param>
public void PushFrame(
    IFrameInfo infoFrame,
    IFrameVideo videoFrame,
    IFrameSound soundFrame)
{
    if (_threadRecord == null)
    {
        return;
    }
    var width = videoFrame.Size.Width;
    var height = videoFrame.Size.Height;
    var mediaFrame = new MediaFrame();
    mediaFrame.Width = width;
    mediaFrame.Height = height;
    mediaFrame.Ratio = videoFrame.Ratio;
    mediaFrame.Image = new int[width * height];
    Array.Copy(videoFrame.Buffer, mediaFrame.Image, mediaFrame.Image.Length);
    // NOTE(review): uses the recorder's configured rate rather than
    // soundFrame.SampleRate (the sibling recorder does the opposite) -
    // confirm this divergence is intentional.
    mediaFrame.SampleRate = _sampleRate;
    _queueMP4.Enqueue(mediaFrame);
    // Snapshot the audio samples so the caller may reuse its buffer.
    var sourceSamples = soundFrame.GetBuffer();
    var samplesCopy = new uint[sourceSamples.Length];
    Array.Copy(sourceSamples, samplesCopy, sourceSamples.Length);
    _queueWAV.Enqueue(samplesCopy);
    _eventFrame.Set();
}
/// <summary>
/// Converts an <see cref="IFrameVideo" /> into its POCO representation,
/// reusing an already-converted instance tracked by the conversion context.
/// Returns null when <paramref name="source" /> is null.
/// </summary>
/// <param name="source">The frame video to convert; may be null.</param>
/// <param name="context">Tracks source-to-poco mappings for this conversion.</param>
private static PocoFrameVideo ToPoco(this IFrameVideo source, ConversionContext context)
{
    if (source == null)
    {
        return null;
    }
    // A PocoFrameVideo needs no conversion at all.
    var poco = source as PocoFrameVideo;
    if (poco != null)
    {
        return poco;
    }
    // NOTE(review): GetOrCreate appears to return true when the context
    // already holds a converted instance for this source; only a freshly
    // created poco is populated below - confirm against its contract.
    if (context.GetOrCreate(source, () => new PocoFrameVideo(), out poco))
    {
        return poco;
    }
    source.CopyTo(poco, nameof(IFrameVideo.FrameItemVideoFilePath));
    return poco;
}
/// <summary>
/// Queues a copy of the frame's audio samples for WAV recording and wakes
/// the recording thread. No-op when recording is inactive.
/// </summary>
/// <param name="info">Frame metadata (unused here).</param>
/// <param name="videoFrame">Video frame (unused here).</param>
/// <param name="soundFrame">Sound frame whose samples are snapshotted.</param>
public void PushFrame(IFrameInfo info, IFrameVideo videoFrame, IFrameSound soundFrame)
{
    if (_threadRecord == null)
    {
        return;
    }
    // Snapshot the samples so the caller may reuse its buffer.
    var sourceSamples = soundFrame.GetBuffer();
    var samplesCopy = new uint[sourceSamples.Length];
    Array.Copy(sourceSamples, samplesCopy, sourceSamples.Length);
    _queue.Enqueue(samplesCopy);
    _eventFrame.Set();
}
/// <summary>
/// Renders the frame into the GDI fallback bitmap (used when hardware
/// rendering is not active) and invalidates the control to repaint.
/// </summary>
/// <param name="frame">Source frame; its 32bpp pixels are copied row-by-row.</param>
private void UpdateGdi(IFrameVideo frame)
{
    lock (_slowRenderSync)
    {
        // (Re)create the backing bitmap when the frame geometry changes.
        if (_slowSurface == null || _slowSurface.Size != frame.Size)
        {
            if (_slowSurface != null)
            {
                _slowSurface.Dispose();
                _slowSurface = null;
            }
            _slowSurface = new Bitmap(
                frame.Size.Width,
                frame.Size.Height,
                System.Drawing.Imaging.PixelFormat.Format32bppRgb);
        }
        var data = _slowSurface.LockBits(
            new Rectangle(0, 0, frame.Size.Width, frame.Size.Height),
            System.Drawing.Imaging.ImageLockMode.WriteOnly,
            System.Drawing.Imaging.PixelFormat.Format32bppRgb);
        try
        {
            unsafe
            {
                // FIX: honor BitmapData.Stride when stepping destination
                // rows instead of assuming it equals width*4 (the original
                // computed 'stride' but never used it).
                var strideInts = data.Stride / sizeof(int);
                int* pDst = (int*)data.Scan0;
                fixed (int* pSrc = frame.Buffer)
                {
                    for (var y = 0; y < frame.Size.Height; y++)
                    {
                        var offsetSrc = y * frame.Size.Width;
                        var offsetDst = y * strideInts;
                        for (var x = 0; x < frame.Size.Width; x++)
                        {
                            pDst[offsetDst + x] = pSrc[offsetSrc + x];
                        }
                    }
                }
            }
        }
        finally
        {
            _slowSurface.UnlockBits(data);
        }
    }
    Invalidate();
}
/// <summary>
/// Pushes a frame to the renderer, pacing against the frame resampler when
/// synchronized output is requested, then updates the OSD, video and icon
/// layers. Falls back to slow GDI rendering when hardware rendering is
/// inactive.
/// </summary>
/// <param name="info">Frame metadata (timing, sample rate, icons).</param>
/// <param name="frame">Video frame to display; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when frame is null.</exception>
public void PushFrame(IFrameInfo info, IFrameVideo frame)
{
    if (frame == null)
    {
        throw new ArgumentNullException("frame");
    }
    if (_frameResampler.SourceRate > 0 && IsSynchronized && !info.IsRefresh)
    {
        // FIX: allocate the wait-handle array once instead of on every
        // iteration of the pacing loop (loop-invariant allocation).
        var waitEvents = new WaitHandle[] { _frameEvent, _cancelEvent };
        do
        {
            // Index 0 is the frame event; anything else means cancellation.
            if (WaitHandle.WaitAny(waitEvents) != 0)
            {
                return;
            }
        } while (!_frameResampler.Next());
    }
    _osdLayer.FrameStartTact = info.StartTact;
    _osdLayer.SampleRate = info.SampleRate;
    if (!info.IsRefresh)
    {
        _osdLayer.UpdateFrame(info.UpdateTime);
    }
    FrameSize = new Size(
        frame.Size.Width,
        (int)(frame.Size.Height * frame.Ratio + 0.5F));
    _videoLayer.Update(frame);
    _iconLayer.Update(info.Icons);
    // Slow GDI fallback when the hardware allocator is not rendering.
    if (!_allocator.IsRendering)
    {
        UpdateGdi(frame);
    }
}
/// <summary>
/// Routes an emulated frame to the video, sound and media-recorder sinks,
/// optionally pacing on the time synchronizer. UI refresh requests bypass
/// sound, synchronization and recording.
/// </summary>
/// <param name="infoFrame">Frame metadata; IsRefresh marks UI redraws.</param>
/// <param name="videoFrame">Video frame; may be null.</param>
/// <param name="soundFrame">Sound frame; may be null.</param>
public void PushFrame(
    IFrameInfo infoFrame,
    IFrameVideo videoFrame,
    IFrameSound soundFrame)
{
    // Snapshot the sinks so concurrent reconfiguration can't swap them mid-push.
    var timeSync = m_timeSync;
    var sound = m_sound;
    var video = m_video;
    if (infoFrame.IsRefresh)
    {
        // UI-initiated redraw: only the video sink matters.
        if (video != null && videoFrame != null)
        {
            video.PushFrame(infoFrame, videoFrame);
        }
        return;
    }
    if (SyncSource == SyncSource.Time && timeSync != null)
    {
        timeSync.WaitFrame();
    }
    if (video != null && videoFrame != null)
    {
        video.PushFrame(infoFrame, videoFrame);
    }
    if (sound != null && soundFrame != null)
    {
        sound.PushFrame(infoFrame, soundFrame);
    }
    var recorder = MediaRecorder;
    if (recorder != null && videoFrame != null && soundFrame != null)
    {
        recorder.PushFrame(infoFrame, videoFrame, soundFrame);
    }
}
/// <summary>
/// Copies the current video frame's pixels into the D3D texture.
/// No-op when the texture has not been created yet.
/// </summary>
/// <param name="videoData">Source frame whose Buffer is copied row-by-row.</param>
private void UpdateTextureData(IFrameVideo videoData)
{
    if (_texture0 == null)
    {
        return;
    }
    // FIX: dispose the stream and unlock in a finally block so the texture
    // is not left locked when the copy throws (the original called
    // UnlockRectangle only on the success path, after the using block).
    var gs = _texture0.LockRectangle(0, LockFlags.None);
    try
    {
        unsafe
        {
            fixed (int* srcPtr = videoData.Buffer)
            {
                NativeMethods.CopyStride(
                    (int*)gs.InternalData.ToPointer(),
                    srcPtr,
                    _frameSize.Width,
                    _frameSize.Height,
                    _textureStride);
            }
        }
    }
    finally
    {
        gs.Dispose();
        _texture0.UnlockRectangle(0);
    }
}
/// <summary>
/// Queues one media frame (a snapshot of the video pixels plus the frame's
/// audio sample rate) for recording and wakes the recording thread.
/// No-op when recording is inactive.
/// </summary>
/// <param name="infoFrame">Frame metadata (unused here).</param>
/// <param name="videoFrame">Video frame whose pixels are snapshotted.</param>
/// <param name="soundFrame">Sound frame supplying the sample rate.</param>
public void PushFrame(
    IFrameInfo infoFrame,
    IFrameVideo videoFrame,
    IFrameSound soundFrame)
{
    if (_threadRecord == null)
    {
        return;
    }
    var width = videoFrame.Size.Width;
    var height = videoFrame.Size.Height;
    var mediaFrame = new MediaFrame();
    mediaFrame.Width = width;
    mediaFrame.Height = height;
    mediaFrame.Ratio = videoFrame.Ratio;
    mediaFrame.Image = new int[width * height];
    Array.Copy(videoFrame.Buffer, mediaFrame.Image, mediaFrame.Image.Length);
    mediaFrame.SampleRate = soundFrame.SampleRate;
    _queue.Enqueue(mediaFrame);
    _eventFrame.Set();
}
/// <summary>
/// Pushes a frame to the XNA renderer: optionally paces the caller, converts
/// the pixel data from BGRA to RGBA channel order (BGRA support was removed
/// from XNA4) and uploads the result into the current output texture.
/// </summary>
/// <param name="info">Frame metadata; IsRefresh marks UI-initiated redraws
/// which skip the update-graph period and frame pacing.</param>
/// <param name="frame">Video frame to display; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when frame is null.</exception>
public void PushFrame(IFrameInfo info, IFrameVideo frame)
{
    if (frame == null)
    {
        throw new ArgumentNullException("frame");
    }
    // Refresh requests don't count toward the update-period statistics.
    if (!info.IsRefresh)
    {
        m_graphUpdate.PushPeriod();
    }
    // Block the emulation thread until the renderer is ready for a new frame.
    if (IsSynchronized && !info.IsRefresh)
    {
        WaitFrame();
    }
    m_debugFrameStart = info.StartTact;
    m_videoData = frame;
    var videoLen = m_videoData.Size.Width * m_videoData.Size.Height;
    // we need to translate bgra colors to rgba
    // because brga color support was removed from XNA4
    // Grow-only scratch buffer, reused across frames to avoid per-frame GC.
    if (m_translateBuffer == null || m_translateBuffer.Length < videoLen)
    {
        m_translateBuffer = new int[videoLen];
    }
    fixed (int* pBuffer = m_videoData.Buffer)
    {
        Marshal.Copy((IntPtr)pBuffer, m_translateBuffer, 0, videoLen);
    }
    fixed (int* pBuffer = m_translateBuffer)
    {
        var puBuffer = (uint*)pBuffer;
        // bgra -> rgba
        // Swaps the two color channels selected by 0x00FF0000/0x000000FF
        // while the 0xFF00FF00 channels pass through unchanged.
        for (var i = 0; i < videoLen; i++)
        {
            puBuffer[i] =
                (puBuffer[i] & 0x000000ff) << 16 |
                (puBuffer[i] & 0xFF00FF00) |
                (puBuffer[i] & 0x00FF0000) >> 16;
        }
    }
    // copy translated image to output texture
    // Lock guards the texture array against concurrent swap/teardown.
    lock (m_syncTexture)
    {
        var texture = m_texture[m_textureIndex];
        if (texture == null)
        {
            // Renderer is shutting down or the texture was not created yet.
            return;
        }
        texture.SetData<int>(m_translateBuffer, 0, videoLen);
    }
}
/// <summary>
/// Converts the <see cref="IFrameVideo" /> into a new instance of
/// <see cref="PocoFrameVideo" /> which is serializable to json or binary.
/// Returns null when <paramref name="source" /> is null.
/// </summary>
/// <param name="source">The frame video to convert; may be null.</param>
/// <returns>The POCO representation of the frame video, or null.</returns>
public static PocoFrameVideo ToPoco(this IFrameVideo source)
{
    return source.ToPoco(new ConversionContext());
}