/// <summary>
/// Relays a frame from the underlying source to this object's subscribers.
/// The frame is cloned so listeners never touch the source's buffer.
/// </summary>
/// <param name="sender">The originating video source.</param>
/// <param name="eventArgs">Event payload carrying the source frame and timestamp.</param>
void SourceNewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // FIX: 'using' guarantees the clone is disposed even when a NewFrame
    // handler throws — the original only disposed on the success path,
    // leaking a GDI+ bitmap handle per failed dispatch.
    using (var bm = (Bitmap)eventArgs.Frame.Clone())
    {
        NewFrame?.Invoke(this, new NewFrameEventArgs(bm, eventArgs.TimeStamp));
    }
}
// Dispatches one payload received from a remote sender, keyed by its flag.
//  - Image: deserializes the byte payload on the UI dispatcher into an
//    object[] of { List<VideoFrame.FrameMapping>, Point } and re-raises it
//    via NewFrame together with the sender id.
//  - Voice: forwards the raw byte payload via NewVoice, but only for
//    senders other than this client (suppresses local echo).
//  - Any other flag is ignored.
// NOTE(security): formatter.Deserialize is BinaryFormatter-style
// deserialization of network-supplied bytes — unsafe against untrusted
// peers; review before exposing this endpoint publicly.
// NOTE(review): image payloads are NOT filtered by SenderId the way voice
// payloads are — confirm that echoing our own frames back is intended.
private static void SendingData(SenderObject senderObject) { switch (senderObject.Flag) { case SenderFlags.Image: Application.Current.Dispatcher.Invoke(delegate { using (MemoryStream ms = new MemoryStream((byte[])senderObject.Data)) { object[] objects = (object[])formatter.Deserialize(ms); NewFrame?.Invoke((List <VideoFrame.FrameMapping>)objects[0], (Point)objects[1], senderObject.SenderId); } }); break; case SenderFlags.Voice: if (senderObject.SenderId != Id) { NewVoice?.Invoke((byte[])senderObject.Data, senderObject.SenderId); } break; default: break; } }
/// <summary>
/// Native video callback: wraps the decoded 32bpp ARGB buffer in a Bitmap
/// and raises NewFrame, then refreshes Time/Duration for seekable media.
/// </summary>
/// <param name="userdata">Pointer to the decoded frame's pixel buffer.</param>
/// <param name="picture">Picture handle supplied by the callback (unused here).</param>
private void DisplayVideo(IntPtr userdata, IntPtr picture)
{
    if (!IsRunning || IsAudio)
    {
        return;
    }
    _lastFrame = DateTime.UtcNow;
    _connecting = false;
    if (ShouldEmitFrame)
    {
        // Tell the GC about the unmanaged frame buffer (4 bytes per pixel).
        var l = _size.Width * _size.Height * 4;
        GC.AddMemoryPressure(l);
        try
        {
            // The Bitmap wraps the native buffer directly; no pixel copy is made.
            using (var mat = new Bitmap(_size.Width, _size.Height, _size.Width * 4, PixelFormat.Format32bppArgb, userdata))
            {
                // FIX: null-conditional invoke — the original dereferenced
                // NewFrame unconditionally and threw NullReferenceException
                // when no handler was attached.
                NewFrame?.Invoke(this, new NewFrameEventArgs(mat));
            }
        }
        finally
        {
            // FIX: always balance AddMemoryPressure, even if a handler throws;
            // the original skipped this on exception, inflating GC pressure.
            GC.RemoveMemoryPressure(l);
        }
        if (Seekable)
        {
            Time = _mediaPlayer.Time;
            Duration = _mediaPlayer.Length;
        }
    }
}
/// <summary>
/// Timer tick: if frame data is available, reads one length-prefixed,
/// compressed frame off the network stream, decodes it and raises NewFrame;
/// otherwise sends a request for the next frame at the desired dimensions.
/// </summary>
/// <param name="sender">Timer raising the event.</param>
/// <param name="e">Timer event data (unused).</param>
private void Timer_Elapsed(object sender, EventArgs e)
{
    if (networkStream.DataAvailable)
    {
        // NOTE(security): formatter.Deserialize on network data is unsafe
        // against untrusted peers — flagged for review, behavior kept as-is.
        int length = (int)formatter.Deserialize(networkStream);
        byte[] buffer = new byte[length];
        int position = 0;
        while (position < length)
        {
            int read = networkStream.Read(buffer, position, length - position);
            if (read <= 0)
            {
                // FIX: Read returns 0 once the remote side closes the
                // connection — the original looped forever in that case.
                return;
            }
            position += read;
        }
        // Decompress the payload, then decode it into a frame.
        // Streams are intentionally left undisposed: the decoded image may
        // remain backed by the stream (GDI+ semantics) — matches original.
        MemoryStream mem = new MemoryStream(buffer);
        mem.Position = 0;
        MemoryStream decompressed = new MemoryStream();
        ShortEncoder.Decode(mem, decompressed);
        decompressed.Position = 0;
        screenImg = encoder.LoadFromStr(decompressed);
        // FIX: null-conditional invoke, consistent with the rest of the file.
        NewFrame?.Invoke(screenImg);
    }
    else
    {
        // No frame pending — ask the server for one at our display size.
        ClientCommand clientCommand = new ClientCommand { needFrame = true, needHeight = height, needWidth = width };
        formatter.Serialize(networkStream, JsonConvert.SerializeObject(clientCommand));
    }
}
/// <summary>
/// Capture loop: repeatedly copies the configured screen region into a
/// reusable bitmap, raises NewFrame, and paces itself to frameInterval.
/// Runs until stopEvent is signalled; errors are reported via
/// VideoSourceError and retried after a short delay.
/// </summary>
private void WorkerThread()
{
    int width = region.Width;
    int height = region.Height;
    int x = region.Location.X;
    int y = region.Location.Y;
    Size size = region.Size;
    // One bitmap/graphics pair is reused for every frame.
    Bitmap bitmap = new Bitmap(width, height, PixelFormat.Format32bppArgb);
    Graphics graphics = Graphics.FromImage(bitmap);
    try
    {
        while (!stopEvent.WaitOne(0, false))
        {
            DateTime start = DateTime.Now;
            try
            {
                graphics.CopyFromScreen(x, y, 0, 0, size, CopyPixelOperation.SourceCopy);
                framesReceived++;
                NewFrame?.Invoke(this, new NewFrameEventArgs(bitmap));
                if (frameInterval > 0)
                {
                    // Sleep off whatever part of the interval capture did not use;
                    // a signalled stop event during the wait ends the loop.
                    TimeSpan span = DateTime.Now.Subtract(start);
                    int msec = frameInterval - (int)span.TotalMilliseconds;
                    if ((msec > 0) && (stopEvent.WaitOne(msec, false)))
                    {
                        break;
                    }
                }
            }
            catch (ThreadAbortException)
            {
                break;
            }
            catch (Exception exception)
            {
                VideoSourceError?.Invoke(this, new VideoSourceErrorEventArgs(exception.Message));
                Thread.Sleep(250);
            }
            if (stopEvent.WaitOne(0, false))
            {
                break;
            }
        }
    }
    finally
    {
        // FIX: dispose the GDI resources even if the thread is aborted or an
        // unexpected exception escapes the loop — the original only released
        // them on the normal exit path.
        graphics.Dispose();
        bitmap.Dispose();
    }
    PlayingFinished?.Invoke(this, ReasonToFinishPlaying.StoppedByUser);
}
// Worker thread: acquires frames from the XIMEA camera until a stop is
// requested, raising NewFrame per image and pacing to _frameInterval.
// On error the reason is recorded and acquisition is always stopped.
private void WorkerThread( )
{
    ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;
    try
    {
        _camera.StartAcquisition( );
        // while there is no request for stop
        while (!_stopEvent.WaitOne(0, false))
        {
            // start time
            DateTime start = DateTime.Now;
            // FIX: 'using' guarantees the frame bitmap is released even if a
            // NewFrame handler throws — the original's explicit Dispose was
            // skipped on exception, leaking a frame per failure.
            using (Bitmap bitmap = _camera.GetImage(15000, false))
            {
                _framesReceived++;
                // bytes = W * H * bytes-per-pixel (pixel format size is in bits).
                _bytesReceived += bitmap.Width * bitmap.Height * (Image.GetPixelFormatSize(bitmap.PixelFormat) >> 3);
                NewFrame?.Invoke(this, new NewFrameEventArgs(bitmap));
            }
            // wait for a while ?
            if (_frameInterval > 0)
            {
                // get frame duration
                TimeSpan span = DateTime.Now.Subtract(start);
                // milliseconds to sleep; a signalled stop during the wait exits.
                int msec = _frameInterval - (int)span.TotalMilliseconds;
                if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
                {
                    break;
                }
            }
        }
    }
    catch (Exception ex)
    {
        Logger.LogExceptionToFile(ex, "XIMEA");
        reasonToStop = ReasonToFinishPlaying.VideoSourceError;
    }
    finally
    {
        // Best-effort shutdown; StopAcquisition may itself throw if the
        // device is already gone.
        try { _camera?.StopAcquisition( ); } catch { }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(reasonToStop));
}
/// <summary>
/// Connects to the roboRIO camera server, sends the fps/compression/size
/// handshake (each a 4-byte big-endian int), then loops reading
/// magic-token + length headers and raising NewFrame for every image,
/// until cancelled or a protocol/transport error occurs.
/// </summary>
/// <param name="token">Cancellation token that stops the stream.</param>
private async Task ProcessVideoAsync(CancellationToken token)
{
    await Task.Yield();
    // FIX: NetworkStream.ReadAsync may return fewer bytes than requested;
    // the original assumed one call filled each 4-byte header, which can
    // silently desynchronize the protocol. This helper loops to completion.
    async Task ReadExactlyAsync(Stream stream, byte[] buf)
    {
        int offset = 0;
        while (offset < buf.Length)
        {
            int read = await stream.ReadAsync(buf, offset, buf.Length - offset, token);
            if (read == 0)
            {
                throw new EndOfStreamException("Camera server closed the connection.");
            }
            offset += read;
        }
    }
    try
    {
        using (Socket clientSocket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp))
        {
            await Task.Factory.FromAsync(clientSocket.BeginConnect, clientSocket.EndConnect, $"roborio-{teamNumber}-FRC.local", CameraServerPort, null);
            using (var socketStream = new NetworkStream(clientSocket))
            {
                // Handshake: fps, compression id, resolution selector.
                await socketStream.WriteAsync(BitConverter.GetBytes(IPAddress.HostToNetworkOrder(framesPerSecond)), 0, 4, token);
                bytesReceived += 4;
                await socketStream.WriteAsync(BitConverter.GetBytes(IPAddress.HostToNetworkOrder(HwCompressionId)), 0, 4, token);
                bytesReceived += 4;
                await socketStream.WriteAsync(BitConverter.GetBytes(IPAddress.HostToNetworkOrder(Size640x480)), 0, 4, token);
                bytesReceived += 4;
                while (true)
                {
                    token.ThrowIfCancellationRequested();
                    var magicToken = new byte[4];
                    await ReadExactlyAsync(socketStream, magicToken);
                    bytesReceived += 4;
                    if (BitConverter.ToInt32(magicToken, 0) != 0x1000)
                    {
                        // Magic token did not match — stream is out of sync; bail out.
                        return;
                    }
                    var imageLengthBytes = new byte[4];
                    await ReadExactlyAsync(socketStream, imageLengthBytes);
                    bytesReceived += 4;
                    // The bitmap decoder reads the JPEG payload directly off the stream.
                    using (var frame = new System.Drawing.Bitmap(socketStream))
                    {
                        NewFrame?.Invoke(this, new NewFrameEventArgs(frame));
                    }
                    bytesReceived += IPAddress.NetworkToHostOrder(BitConverter.ToInt32(imageLengthBytes, 0));
                    framesReceived++;
                }
            }
        }
    }
    catch (OperationCanceledException)
    {
        PlayingFinished?.Invoke(this, ReasonToFinishPlaying.StoppedByUser);
    }
    catch (Exception ex)
    {
        PlayingFinished?.Invoke(this, ReasonToFinishPlaying.VideoSourceError);
        VideoSourceError?.Invoke(this, new VideoSourceErrorEventArgs(ex.Message));
    }
}
/// <summary>
/// If the interpreter has flagged a redraw, renders the current graphics
/// buffer, publishes it via OnNewFrame, and clears the flag.
/// </summary>
public void TryPresentFrame()
{
    // Nothing to do until the emulator requests a redraw.
    if (!Chip8.DrawFlag)
    {
        return;
    }
    OnNewFrame?.Invoke(DrawGraphics());
    Chip8.DrawFlag = false;
}
/// <summary>
/// Handles a new frame from the capture source: advances the frame counter
/// (wrapping before it can overflow) and forwards the frame to subscribers.
/// </summary>
/// <param name="sender">The capture source raising the event.</param>
/// <param name="image">The captured frame, passed by reference so handlers may replace it.</param>
private void OnNewFrame(object sender, ref Bitmap image)
{
    // FIX: the original test 'FrameIndex > int.MaxValue' can never be true
    // for an int, so the counter silently overflowed to negative values
    // instead of resetting. '>=' makes the wrap reachable.
    if (FrameIndex >= int.MaxValue)
    {
        FrameIndex = 0;
    }
    ++FrameIndex;
    NewFrame?.Invoke(ref image, FrameIndex);
}
/// <summary>
/// Raises the NewFrame event with the supplied frame payload.
/// </summary>
/// <param name="e">Event arguments describing the received frame.</param>
protected virtual void OnNewFrame(FrameReceivedArgs e)
{
    // Null-conditional invoke is the compact, race-free equivalent of
    // copying the delegate to a local variable and testing it for null.
    NewFrame?.Invoke(this, e);
}
// Drains the video and audio queues until stop is signalled or the app is
// shutting down (polling every 5 ms):
//  - video frames are raised via NewFrame then disposed;
//  - audio buffers are raised via DataAvailable, mixed through SampleChannel
//    into _waveProvider and, when Listening, into WaveOutProvider.
// Any frames still queued on exit are disposed. Per-iteration exceptions are
// logged and the loop continues.
// NOTE(review): sampleBuffer is allocated with audio.Length floats and Read
// is asked for audio.Length samples — confirm whether SampleChannel.Read
// expects a byte count or a sample count before relying on 'read'.
private void EventManager() { Bitmap frame; while (!_stopEvent.WaitOne(5, false) && !MainForm.ShuttingDown) { try { if (_videoQueue.TryDequeue(out frame)) { if (frame != null) { NewFrame?.Invoke(this, new NewFrameEventArgs(frame)); frame.Dispose(); } } byte[] audio; if (!_audioQueue.TryDequeue(out audio)) { continue; } var da = DataAvailable; da?.Invoke(this, new DataAvailableEventArgs(audio)); var sampleBuffer = new float[audio.Length]; int read = SampleChannel.Read(sampleBuffer, 0, audio.Length); _waveProvider?.AddSamples(audio, 0, read); if (WaveOutProvider != null && Listening) { WaveOutProvider?.AddSamples(audio, 0, read); } } catch (Exception ex) { Logger.LogExceptionToFile(ex, "FFMPEG"); } } try { while (_videoQueue != null && _videoQueue.TryDequeue(out frame)) { frame?.Dispose(); } } catch (Exception ex) { Logger.LogExceptionToFile(ex, "FFMPEG"); } }
// Worker thread: acquires frames from the XIMEA camera until abort or app
// shutdown, raising NewFrame per image (when EmitFrame allows) and pacing
// the loop to FrameInterval. Acquisition is always stopped on exit.
private void WorkerThread( )
{
    try
    {
        _camera.StartAcquisition( );
        // while there is no request for stop
        while (!_abort.WaitOne(0) && !MainForm.ShuttingDown)
        {
            // FIX: the original captured DateTime.Now here but subtracted it
            // from DateTime.UtcNow below, so the measured duration was off by
            // the local UTC offset and frame pacing broke outside UTC.
            DateTime start = DateTime.UtcNow;
            // get next frame
            if (EmitFrame)
            {
                using (var bitmap = _camera.GetImage(15000, false))
                {
                    NewFrame?.Invoke(this, new NewFrameEventArgs(bitmap));
                }
            }
            // wait for a while ?
            if (FrameInterval > 0)
            {
                // get download duration
                var span = DateTime.UtcNow.Subtract(start);
                // milliseconds to sleep
                var msec = FrameInterval - (int)span.TotalMilliseconds;
                if (msec > 0)
                {
                    _abort.WaitOne(msec);
                }
            }
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(ex, "XIMEA");
        _res = ReasonToFinishPlaying.VideoSourceError;
    }
    finally
    {
        // Best-effort shutdown; the device may already be gone.
        try { _camera?.StopAcquisition( ); } catch { }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
// Kinect color-frame handler, throttled so at most one frame is processed
// per MaxInterval milliseconds. Converts the frame to a bitmap (grayscale
// path for the 640x480 infrared format, color path otherwise), overlays
// the tracked skeletons (under the _skeletons lock) and any trip-wire
// lines, raises NewFrame, then disposes the bitmap.
void SensorColorFrameReady(object sender, ColorImageFrameReadyEventArgs e) { if ((DateTime.UtcNow - _lastFrameTimeStamp).TotalMilliseconds >= MaxInterval) { _lastFrameTimeStamp = DateTime.UtcNow; using (ColorImageFrame imageFrame = e.OpenColorImageFrame()) { if (imageFrame != null) { Bitmap bmap; switch (imageFrame.Format) { default: bmap = ColorImageToBitmap(imageFrame); break; case ColorImageFormat.InfraredResolution640x480Fps30: bmap = GrayScaleImageToBitmap(imageFrame); break; } if (bmap != null) { using (Graphics g = Graphics.FromImage(bmap)) { lock (_skeletons) { foreach (Skeleton skel in _skeletons) { DrawBonesAndJoints(skel, g); } } if (_tripwires) { foreach (var dl in TripWires) { g.DrawLine(TripWirePen, dl.StartPoint, dl.EndPoint); } } } // notify client NewFrame?.Invoke(this, new NewFrameEventArgs(bmap)); // release the image bmap.Dispose(); } } } } }
// Thread entry point: plays every frame of the AVI file back to back,
// raising NewFrame per frame, until a stop is requested or reading fails.
// Exceptions end playback and are traced to the debugger.
public void WorkerThread()
{
    AVIReader aviReader = new AVIReader();
    try
    {
        // open file
        aviReader.Open(source);
        while (true)
        {
            // get next frame
            Bitmap bmp = aviReader.GetNextFrame();
            framesReceived++;
            // need to stop ?
            if (stopEvent.WaitOne(0, false))
            {
                break;
            }
            NewFrame?.Invoke(this, new CameraEventArgs(bmp));
            // free image
            bmp.Dispose();
        }
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("exception : " + ex.Message);
    }
    finally
    {
        // FIX: dispose the reader in finally so it is released even on
        // thread abort — the original only disposed it after the catch.
        // (Also removed the unused start/span timing locals and the
        // commented-out sleep code.)
        aviReader.Dispose();
    }
}
/// <summary>
/// Raises NewFrame with a freshly wrapped audio-frame payload.
/// NOTE(review): when IsRunning is true the event only fires if no stop has
/// been requested, yet when IsRunning is false it fires unconditionally —
/// this asymmetry looks inverted; confirm intent before changing.
/// </summary>
/// <param name="audio">Raw PCM samples for this frame.</param>
/// <param name="bufferLen">Number of valid samples in <paramref name="audio"/>.</param>
/// <param name="sampleTime">Timestamp of the first sample.</param>
protected void OnNewAudioFrame(short[] audio, int bufferLen, double sampleTime)
{
    if (IsRunning)
    {
        framesReceived++;
        // Fire only if a stop has not been signalled; WaitOne(0, ...) polls.
        if ((!stopEvent.WaitOne(0, true)) && (NewFrame != null))
        {
            NewFrame(this, new NewAudioFrameEventArgs(audio, bufferLen, sampleTime));
        }
    }
    else
    {
        NewFrame?.Invoke(this, new NewAudioFrameEventArgs(audio, bufferLen, sampleTime));
    }
}
// Handles each camera frame: retrieves the raw image, runs it through the
// detection pipeline, converts every processed result (and the original)
// to a UI-friendly BitmapSource, then raises NewFrame to notify the UI.
void ConvertFrame(object sender, EventArgs e)
{
    var capturedImage = new Mat();
    _processedFrames = new List<BitmapSource>();
    _source.Retrieve(capturedImage, 0);
    // Convert each processed image for WPF consumption.
    foreach (var processed in _detectionManager.ProcessFrame(capturedImage))
    {
        _processedFrames.Add(MatConverter.ToBitmapSource(processed));
    }
    _originalFrame = MatConverter.ToBitmapSource(capturedImage);
    NewFrame?.Invoke(null, new EventArgs());
}
// Handles a new frame from the camera: clones it into lastFrame (disposing
// the previous frame first), runs the optional motion detector — raising
// Alarm when the motion level reaches alarmLevel — then records the frame
// dimensions. The whole update runs under Monitor.Enter(this); clients are
// notified via NewFrame after the lock is released.
// NOTE(review): locking on 'this' is fragile (external code could deadlock
// us by taking the same lock) and the silent catch(Exception) hides frame
// errors — both kept as-is because other code may rely on them; review.
private void Video_NewFrame(object sender, CameraEventArgs e) { try { // lock Monitor.Enter(this); // dispose old frame if (lastFrame != null) { lastFrame.Dispose( ); } lastFrame = (Bitmap)e.Bitmap.Clone( ); // apply motion detector if (motionDetecotor != null) { motionDetecotor.ProcessFrame(ref lastFrame); // check motion level if ( (motionDetecotor.MotionLevel >= alarmLevel) && (Alarm != null) ) { Alarm(this, new EventArgs( )); } } // image dimension width = lastFrame.Width; height = lastFrame.Height; } catch (Exception) { } finally { // unlock Monitor.Exit(this); } // notify client NewFrame?.Invoke(this, new EventArgs()); }
/// <summary>
/// Frame pump: alternates between the two cached JPEG frames at the
/// configured delay until _isWorking is cleared.
/// </summary>
private async Task DoWork()
{
    while (_isWorking)
    {
        // FIX: use the value returned by Interlocked.Increment instead of
        // re-reading _count afterwards — the original's separate read was
        // not atomic with the increment, so a concurrent increment could
        // make the parity test observe a different value than the one this
        // iteration produced.
        var count = System.Threading.Interlocked.Increment(ref _count);
        if (count % 2 == 0)
        {
            NewFrame?.Invoke(this, new NewFrameEventArgs(_jpegFrame1));
        }
        else
        {
            NewFrame?.Invoke(this, new NewFrameEventArgs(_jpegFrame2));
        }
        await Task.Delay(TimeSpan.FromMilliseconds(_delay));
    }
}
/// <summary>
/// Records the current FPS sample: updates the running min/max, appends a
/// FrameInformation entry to the frame log, and notifies listeners.
/// A zero reading is discarded entirely.
/// </summary>
private void logFps()
{
    // Ignore empty samples.
    if (currentFPS == 0)
    {
        return;
    }
    // Track the running extremes.
    minFps = (currentFPS < minFps) ? currentFPS : minFps;
    maxFps = (currentFPS > maxFps) ? currentFPS : maxFps;
    var sample = new FrameInformation(totalFramesRecorded, currentFPS, Time.deltaTime);
    _frameLog.Add(sample);
    NewFrame?.Invoke(this, new EventArgs());
}
/// <summary>
/// Receives a frame from the video source, runs it through the image
/// pipeline, draws any detected face rectangles in red, republishes the
/// result via NewFrame, then disposes it.
/// </summary>
/// <param name="sender">The source raising the event (also used as the copy lock).</param>
/// <param name="e">Event payload carrying the source frame.</param>
public void UpdateFrame(object sender, NewFrameEventArgs e)
{
    Bitmap bitmap;
    // NOTE(review): locking on 'sender' (an externally supplied object) is
    // fragile; kept because other code may synchronize on the source object.
    lock (sender)
    {
        bitmap = new Bitmap(e.Frame);
    }
    // FIX: the original overwrote 'bitmap' with the processed image and never
    // disposed the intermediate copy, leaking one GDI+ handle per frame.
    // Dispose it only when ProcessImage returned a different instance.
    Bitmap processed = HelperMethods.ProcessImage(bitmap);
    if (!ReferenceEquals(processed, bitmap))
    {
        bitmap.Dispose();
    }
    if (faceRectangles != null)
    {
        using (Graphics g = Graphics.FromImage(processed))
        using (Pen pen = new Pen(new SolidBrush(Color.Red), 3))
        {
            foreach (Rectangle face in faceRectangles)
            {
                g.DrawRectangle(pen, face);
            }
        }
    }
    NewFrame?.Invoke(this, new NewFrameEventArgs(processed));
    processed.Dispose();
}
// Worker thread: polls the abort handle every 10 ms and, while emission is
// enabled, grabs a frame from the XIMEA camera and raises NewFrame.
// Acquisition is always stopped on exit and the finish reason published.
private void WorkerThread( )
{
    try
    {
        _camera.StartAcquisition( );
        // while there is no request for stop (10 ms poll doubles as pacing)
        while (!_abort.WaitOne(10) && !MainForm.ShuttingDown)
        {
            // FIX: removed the unused 'start' timestamp the original captured
            // on every iteration — this variant has no frame-interval pacing,
            // so the value was never read.
            if (ShouldEmitFrame)
            {
                using (var bitmap = _camera.GetImage(15000, false))
                {
                    NewFrame?.Invoke(this, new NewFrameEventArgs(bitmap));
                }
            }
        }
    }
    catch (Exception ex)
    {
        Logger.LogException(ex, "XIMEA");
        _res = ReasonToFinishPlaying.VideoSourceError;
    }
    finally
    {
        // Best-effort shutdown; the device may already be gone.
        try { _camera?.StopAcquisition( ); } catch { }
    }
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
// Receives a frame from the video source (ignored once a stop has been
// requested): clones it, raises NewFrame by ref (handlers may replace the
// bitmap), then under sync_context swaps it in as currentFrame — flagging a
// size change, disposing the previous current/converted frames, caching the
// new frame size — and pre-converts deep-colour formats (16bpp grayscale,
// 48bpp RGB, 64bpp ARGB) down to 8bpp for display. Finally invalidates the
// control so it repaints with the new frame.
private void VideoSourceNewFrame(object sender, NewFrameEventArgs eventArgs) { if (!requestedToStop) { var newFrame = (Bitmap)eventArgs.Frame.Clone(); NewFrame?.Invoke(this, ref newFrame); lock (sync_context) { if (currentFrame != null) { if (currentFrame.Size != eventArgs.Frame.Size) { needSizeUpdate = true; } currentFrame.Dispose(); currentFrame = null; } if (convertedFrame != null) { convertedFrame.Dispose(); convertedFrame = null; } currentFrame = newFrame; frameSize = currentFrame.Size; lastMessage = null; if ((currentFrame.PixelFormat == PixelFormat.Format16bppGrayScale) || (currentFrame.PixelFormat == PixelFormat.Format48bppRgb) || (currentFrame.PixelFormat == PixelFormat.Format64bppArgb)) { convertedFrame = Imaging.Image.Convert16bppTo8bpp(currentFrame); } } Invalidate(); } }
/// <summary>
/// Publisher loop: whenever a new frame has been flagged, clones the
/// current bitmap under the lock and raises NewFrame with the copy.
/// Runs until a stop is requested or the application shuts down.
/// </summary>
private void ProcessFrames()
{
    do
    {
        if (_actualBitmap != null && _newFrame && NewFrame != null)
        {
            try
            {
                Bitmap b = null;
                // Clone under the lock so the producer can't mutate the
                // bitmap mid-copy; the handler gets a private copy.
                lock (_lockHelper)
                {
                    b = (Bitmap)_actualBitmap.Clone();
                }
                NewFrame.Invoke(_instance, new NewFrameEventArgs(b));
            }
            catch (Exception ex)
            {
                Logger.LogException(ex, "New Frame Error");
            }
            _newFrame = false;
        }
        else
        {
            // FIX: yield briefly when there is nothing to publish — the
            // original loop spun continuously at 100% CPU while waiting
            // for the next frame flag.
            Thread.Sleep(5);
        }
    } while (!_stopReadingFrames && !MainForm.ShuttingDown);
}
// Demuxes and decodes the media stream with FFmpeg until aborted or the app
// shuts down.
//  - Audio packets: decoded via avcodec_send_packet/receive_frame, resampled
//    with swr_convert into a 16-bit interleaved buffer, pushed into a lazily
//    created BufferedWaveProvider/SampleChannel and raised via DataAvailable
//    (with optional passthrough to WaveOutProvider while Listening).
//  - Video packets: decoded, converted to BGR24 with sws_scale into a single
//    lazily allocated native buffer, wrapped in a Bitmap and raised via the
//    NewFrame snapshot 'nf' (only when EmitFrame is set).
//  - Watchdog: if no video frame arrives within _timeout ms the loop ends
//    with ReasonToFinishPlaying.DeviceLost.
// On exit all FFmpeg contexts/frames and the conversion buffer are released
// under the global mutex, and PlayingFinished/AudioFinished are raised.
// NOTE(review): the final NewFrame(new NewFrameEventArgs(null)) signals
// end-of-stream — confirm all subscribers tolerate a null frame.
private void ReadFrames() { var pConvertedFrameBuffer = IntPtr.Zero; SwsContext *pConvertContext = null; var audioInited = false; var videoInited = false; byte[] buffer = null, tbuffer = null; var dstData = new byte_ptrArray4(); var dstLinesize = new int_array4(); BufferedWaveProvider waveProvider = null; SampleChannel sampleChannel = null; var packet = new AVPacket(); do { ffmpeg.av_init_packet(&packet); if (_audioCodecContext != null && buffer == null) { buffer = new byte[_audioCodecContext->sample_rate * 2]; tbuffer = new byte[_audioCodecContext->sample_rate * 2]; } if (Log("AV_READ_FRAME", ffmpeg.av_read_frame(_formatContext, &packet))) { break; } if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT) { break; } var nf = NewFrame; var da = DataAvailable; _lastPacket = DateTime.UtcNow; int ret; if (_audioStream != null && packet.stream_index == _audioStream->index && _audioCodecContext != null) { if (HasAudioStream != null) { HasAudioStream?.Invoke(this, EventArgs.Empty); HasAudioStream = null; } if (da != null) { var s = 0; fixed(byte **outPtrs = new byte *[32]) { fixed(byte *bPtr = &tbuffer[0]) { outPtrs[0] = bPtr; ffmpeg.avcodec_send_packet(_audioCodecContext, &packet); do { ret = ffmpeg.avcodec_receive_frame(_audioCodecContext, _audioFrame); if (ret == 0) { var dat = _audioFrame->data[0]; var numSamplesOut = ffmpeg.swr_convert(_swrContext, outPtrs, _audioCodecContext->sample_rate, &dat, _audioFrame->nb_samples); var l = numSamplesOut * 2 * _audioCodecContext->channels; Buffer.BlockCopy(tbuffer, 0, buffer, s, l); s += l; } if (_audioFrame->decode_error_flags > 0) { break; } } while (ret == 0); if (s > 0) { var ba = new byte[s]; Buffer.BlockCopy(buffer, 0, ba, 0, s); if (!audioInited) { audioInited = true; RecordingFormat = new WaveFormat(_audioCodecContext->sample_rate, 16, _audioCodecContext->channels); waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = 
TimeSpan.FromMilliseconds(500) }; sampleChannel = new SampleChannel(waveProvider); sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter; } waveProvider.AddSamples(ba, 0, s); var sampleBuffer = new float[s]; var read = sampleChannel.Read(sampleBuffer, 0, s); da(this, new DataAvailableEventArgs(ba, read)); if (Listening) { WaveOutProvider?.AddSamples(ba, 0, read); } } } } } } if (nf != null && _videoStream != null && packet.stream_index == _videoStream->index && _videoCodecContext != null) { ffmpeg.avcodec_send_packet(_videoCodecContext, &packet); do { ret = ffmpeg.avcodec_receive_frame(_videoCodecContext, _videoFrame); if (ret == 0 && EmitFrame) { if (!videoInited) { videoInited = true; var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_BGR24, _videoCodecContext->width, _videoCodecContext->height, 1); pConvertedFrameBuffer = Marshal.AllocHGlobal(convertedFrameBufferSize); ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)pConvertedFrameBuffer, AVPixelFormat.AV_PIX_FMT_BGR24, _videoCodecContext->width, _videoCodecContext->height, 1); pConvertContext = ffmpeg.sws_getContext(_videoCodecContext->width, _videoCodecContext->height, _videoCodecContext->pix_fmt, _videoCodecContext->width, _videoCodecContext->height, AVPixelFormat.AV_PIX_FMT_BGR24, ffmpeg.SWS_FAST_BILINEAR, null, null, null); } Log("SWS_SCALE", ffmpeg.sws_scale(pConvertContext, _videoFrame->data, _videoFrame->linesize, 0, _videoCodecContext->height, dstData, dstLinesize)); if (_videoFrame->decode_error_flags > 0) { break; } using ( var mat = new Bitmap(_videoCodecContext->width, _videoCodecContext->height, dstLinesize[0], PixelFormat.Format24bppRgb, pConvertedFrameBuffer)) { var nfe = new NewFrameEventArgs(mat); nf.Invoke(this, nfe); } _lastVideoFrame = DateTime.UtcNow; } } while (ret == 0); } if (nf != null && _videoStream != null) { if ((DateTime.UtcNow - _lastVideoFrame).TotalMilliseconds > _timeout) { _res = ReasonToFinishPlaying.DeviceLost; 
_abort = true; } } ffmpeg.av_packet_unref(&packet); } while (!_abort && !MainForm.ShuttingDown); NewFrame?.Invoke(this, new NewFrameEventArgs(null)); try { Program.MutexHelper.Wait(); if (pConvertedFrameBuffer != IntPtr.Zero) { Marshal.FreeHGlobal(pConvertedFrameBuffer); } if (_formatContext != null) { if (_formatContext->streams != null) { var j = (int)_formatContext->nb_streams; for (var i = j - 1; i >= 0; i--) { var stream = _formatContext->streams[i]; if (stream != null && stream->codec != null && stream->codec->codec != null) { stream->discard = AVDiscard.AVDISCARD_ALL; ffmpeg.avcodec_close(stream->codec); } } } fixed(AVFormatContext **f = &_formatContext) { ffmpeg.avformat_close_input(f); } _formatContext = null; } if (_videoFrame != null) { fixed(AVFrame **pinprt = &_videoFrame) { ffmpeg.av_frame_free(pinprt); _videoFrame = null; } } if (_audioFrame != null) { fixed(AVFrame **pinprt = &_audioFrame) { ffmpeg.av_frame_free(pinprt); _audioFrame = null; } } _videoStream = null; _audioStream = null; _audioCodecContext = null; _videoCodecContext = null; if (_swrContext != null) { fixed(SwrContext **s = &_swrContext) { ffmpeg.swr_free(s); } _swrContext = null; } if (pConvertContext != null) { ffmpeg.sws_freeContext(pConvertContext); } if (sampleChannel != null) { sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter; sampleChannel = null; } } catch (Exception ex) { Logger.LogException(ex, "Media Stream (close)"); } finally { try { Program.MutexHelper.Release(); } catch { } } PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res)); AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res)); }
// FFmpeg demux/decode loop (variant with hardware-decode support) that runs
// until aborted or app shutdown.
//  - Audio packets (unless _ignoreAudio): decoded per-frame into a freshly
//    allocated AVFrame, resampled via swr_convert (the SwrContext is created
//    lazily because send_packet can change the channel layout), accumulated
//    into a 16-bit buffer and raised via DataAvailable; resample failures
//    set _ignoreAudio so audio is skipped from then on.
//  - Video packets: decoded (with av_hwframe_transfer_data NV12 readback
//    when a hardware device context is active), scaled/converted to BGR24 at
//    the configured resize dimensions via sws_scale, wrapped in a Bitmap and
//    raised via the NewFrame snapshot 'nf' when ShouldEmitFrame allows.
//  - Watchdog: no video frame within _timeoutMicroSeconds ends the loop with
//    DeviceLost; ret == -11 (EAGAIN) sleeps 10 ms to avoid spinning.
// On exit a null-frame NewFrame signals end-of-stream and CleanUp() releases
// all native resources.
// NOTE(review): the watchdog multiplies elapsed milliseconds by 1000 before
// comparing against _timeoutMicroSeconds — confirm the units line up.
private void ReadFrames() { pConvertedFrameBuffer = IntPtr.Zero; pConvertContext = null; var audioInited = false; var videoInited = false; byte[] buffer = null, tbuffer = null; var dstData = new byte_ptrArray4(); var dstLinesize = new int_array4(); BufferedWaveProvider waveProvider = null; sampleChannel = null; var packet = new AVPacket(); do { ffmpeg.av_init_packet(&packet); if (_audioCodecContext != null && buffer == null) { buffer = new byte[_audioCodecContext->sample_rate * 2]; tbuffer = new byte[_audioCodecContext->sample_rate * 2]; } if (Log("AV_READ_FRAME", ffmpeg.av_read_frame(_formatContext, &packet))) { break; } if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT) { break; } var nf = NewFrame; var da = DataAvailable; _lastPacket = DateTime.UtcNow; var ret = -11; //EAGAIN if (_audioStream != null && packet.stream_index == _audioStream->index && _audioCodecContext != null && !_ignoreAudio) { if (HasAudioStream != null) { HasAudioStream?.Invoke(this, EventArgs.Empty); HasAudioStream = null; } if (da != null) { var s = 0; fixed(byte **outPtrs = new byte *[32]) { fixed(byte *bPtr = &tbuffer[0]) { outPtrs[0] = bPtr; var af = ffmpeg.av_frame_alloc(); ffmpeg.avcodec_send_packet(_audioCodecContext, &packet); do { ret = ffmpeg.avcodec_receive_frame(_audioCodecContext, af); if (ret == 0) { int numSamplesOut = 0; try { if (_swrContext == null) { //need to do this here as send_packet can change channel layout and throw an exception below initSWR(); } var dat = af->data[0]; numSamplesOut = ffmpeg.swr_convert(_swrContext, outPtrs, _audioCodecContext->sample_rate, &dat, af->nb_samples); } catch (Exception ex) { Logger.LogException(ex, "MediaStream - Audio Read"); _ignoreAudio = true; break; } if (numSamplesOut > 0) { var l = numSamplesOut * 2 * OutFormat.Channels; Buffer.BlockCopy(tbuffer, 0, buffer, s, l); s += l; } else { ret = numSamplesOut; //(error) } } if (af->decode_error_flags > 0) { break; } } while (ret == 0); 
ffmpeg.av_frame_free(&af); if (s > 0) { var ba = new byte[s]; Buffer.BlockCopy(buffer, 0, ba, 0, s); if (!audioInited) { audioInited = true; RecordingFormat = new WaveFormat(_audioCodecContext->sample_rate, 16, _audioCodecContext->channels); waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(200) }; sampleChannel = new SampleChannel(waveProvider); sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter; } waveProvider.AddSamples(ba, 0, s); var sampleBuffer = new float[s]; var read = sampleChannel.Read(sampleBuffer, 0, s); da(this, new DataAvailableEventArgs(ba, s)); if (Listening) { WaveOutProvider?.AddSamples(ba, 0, read); } } } } } } if (nf != null && _videoStream != null && packet.stream_index == _videoStream->index && _videoCodecContext != null) { var ef = ShouldEmitFrame; ffmpeg.avcodec_send_packet(_videoCodecContext, &packet); do { var vf = ffmpeg.av_frame_alloc(); ret = ffmpeg.avcodec_receive_frame(_videoCodecContext, vf); if (ret == 0 && ef) { AVPixelFormat srcFmt; if (_hwDeviceCtx != null) { srcFmt = AVPixelFormat.AV_PIX_FMT_NV12; var output = ffmpeg.av_frame_alloc(); ffmpeg.av_hwframe_transfer_data(output, vf, 0); ffmpeg.av_frame_copy_props(output, vf); ffmpeg.av_frame_free(&vf); vf = output; } else { srcFmt = (AVPixelFormat)vf->format; } if (!videoInited) { videoInited = true; _finalSize = Helper.CalcResizeSize(_source.settings.resize, new Size(_videoCodecContext->width, _videoCodecContext->height), new Size(_source.settings.resizeWidth, _source.settings.resizeHeight)); var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_BGR24, _finalSize.Width, _finalSize.Height, 1); pConvertedFrameBuffer = Marshal.AllocHGlobal(convertedFrameBufferSize); ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)pConvertedFrameBuffer, AVPixelFormat.AV_PIX_FMT_BGR24, _finalSize.Width, _finalSize.Height, 1); pConvertContext = 
ffmpeg.sws_getContext(_videoCodecContext->width, _videoCodecContext->height, NormalizePixelFormat(srcFmt), _finalSize.Width, _finalSize.Height, AVPixelFormat.AV_PIX_FMT_BGR24, ffmpeg.SWS_FAST_BILINEAR, null, null, null); } Log("SWS_SCALE", ffmpeg.sws_scale(pConvertContext, vf->data, vf->linesize, 0, _videoCodecContext->height, dstData, dstLinesize)); if (vf->decode_error_flags > 0) { ffmpeg.av_frame_free(&vf); break; } using ( var mat = new Bitmap(_finalSize.Width, _finalSize.Height, dstLinesize[0], PixelFormat.Format24bppRgb, pConvertedFrameBuffer)) { var nfe = new NewFrameEventArgs(mat); nf.Invoke(this, nfe); } _lastVideoFrame = DateTime.UtcNow; ffmpeg.av_frame_free(&vf); break; } ffmpeg.av_frame_free(&vf); } while (ret == 0); } if (nf != null && _videoStream != null) { if ((DateTime.UtcNow - _lastVideoFrame).TotalMilliseconds * 1000 > _timeoutMicroSeconds) { _res = ReasonToFinishPlaying.DeviceLost; _abort = true; } } ffmpeg.av_packet_unref(&packet); if (ret == -11) { Thread.Sleep(10); } } while (!_abort && !MainForm.ShuttingDown); NewFrame?.Invoke(this, new NewFrameEventArgs(null)); CleanUp(); }
// Desktop capture worker: grabs the selected screen (or configured sub-area)
// once per FrameInterval, optionally draws the mouse pointer, and raises
// NewFrame with a 24bpp bitmap. The capture rectangle is resolved lazily on
// the first pass; odd virtual-screen dimensions are rounded down to even
// values (required downstream by video encoders). A grab failure (commonly
// remote desktop or a screensaver locking the desktop) is logged once,
// reported as DeviceLost, and ends the loop after a 250 ms pause. On exit
// PlayingFinished is raised with the stop reason.
private void WorkerThread() { var res = ReasonToFinishPlaying.StoppedByUser; while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown) { try { DateTime start = DateTime.UtcNow; // increment frames counter _framesReceived++; // provide new image to clients if (NewFrame != null) { Screen s = Screen.AllScreens[_screenindex]; if (_screenSize == Rectangle.Empty) { if (_area != Rectangle.Empty) { _screenSize = _area; } else { _screenSize = s.Bounds; //virtual clients can have odd dimensions if (_screenSize.Width % 2 != 0) { _screenSize.Width = _screenSize.Width - 1; } if (_screenSize.Height % 2 != 0) { _screenSize.Height = _screenSize.Height - 1; } } } using (var target = new Bitmap(_screenSize.Width, _screenSize.Height, PixelFormat.Format24bppRgb)) { using (Graphics g = Graphics.FromImage(target)) { try { g.CopyFromScreen(s.Bounds.X + _screenSize.X, s.Bounds.Y + _screenSize.Y, 0, 0, new Size(_screenSize.Width, _screenSize.Height)); } catch (Exception ex) { throw new Exception("Error grabbing screen (" + ex.Message + ") - disable screensaver."); //probably remote desktop or screensaver has kicked in } if (MousePointer) { var cursorBounds = new Rectangle( Cursor.Position.X - s.Bounds.X - _screenSize.X, Cursor.Position.Y - s.Bounds.Y - _screenSize.Y, Cursors.Default.Size.Width, Cursors.Default.Size.Height); Cursors.Default.Draw(g, cursorBounds); } } // notify client NewFrame?.Invoke(this, new NewFrameEventArgs(target)); _error = false; } } // wait for a while ? 
if (_frameInterval > 0) { // get download duration TimeSpan span = DateTime.UtcNow.Subtract(start); // milliseconds to sleep int msec = _frameInterval - (int)span.TotalMilliseconds; if ((msec > 0) && (_stopEvent.WaitOne(msec, false))) { break; } } } catch (Exception ex) { if (!_error) { MainForm.LogExceptionToFile(ex, "Desktop"); _error = true; } // provide information to clients res = ReasonToFinishPlaying.DeviceLost; // wait for a while before the next try Thread.Sleep(250); break; } } PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(res)); }
/// <summary>Forwards a captured frame to NewFrame subscribers, if any.</summary>
/// <param name="e">The captured frame's event payload.</param>
private void onNewFrame(CaptureFrameEventArgs e) => NewFrame?.Invoke(this, e);
// Kinect depth-frame handler, throttled to at most one frame per
// MaxInterval milliseconds. Each 16-bit depth sample is shifted right to
// drop the player-index bits, then reduced to an 8-bit intensity by keeping
// only the low byte (+1 first, so "too far/unknown" maps to black) — the
// intensity deliberately wraps rather than clamps to preserve detail. The
// intensity is written to the B, G and R channels of a 32bpp buffer (the
// fourth byte is skipped/unused), copied into a locked Bitmap, overlaid
// with the tracked skeletons (under the _skeletons lock) and any trip-wire
// lines, raised via NewFrame, and finally disposed.
void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e) { using (DepthImageFrame depthFrame = e.OpenDepthImageFrame()) { if (depthFrame != null) { if ((DateTime.UtcNow - _lastFrameTimeStamp).TotalMilliseconds >= MaxInterval) { _lastFrameTimeStamp = DateTime.UtcNow; // Copy the pixel data from the image to a temporary array depthFrame.CopyPixelDataTo(_depthPixels); // Convert the depth to RGB int colorPixelIndex = 0; foreach (short t in _depthPixels) { // discard the portion of the depth that contains only the player index short depth = (short)(t >> DepthImageFrame.PlayerIndexBitmaskWidth); // to convert to a byte we're looking at only the lower 8 bits // by discarding the most significant rather than least significant data // we're preserving detail, although the intensity will "wrap" // add 1 so that too far/unknown is mapped to black byte intensity = (byte)((depth + 1) & byte.MaxValue); // Write out blue byte _colorPixels[colorPixelIndex++] = intensity; // Write out green byte _colorPixels[colorPixelIndex++] = intensity; // Write out red byte _colorPixels[colorPixelIndex++] = intensity; // We're outputting BGR, the last byte in the 32 bits is unused so skip it // If we were outputting BGRA, we would write alpha here. 
++colorPixelIndex; } // Write the pixel data into our bitmap var bmap = new Bitmap( depthFrame.Width, depthFrame.Height, PixelFormat.Format32bppRgb); BitmapData bmapdata = bmap.LockBits( new Rectangle(0, 0, depthFrame.Width, depthFrame.Height), ImageLockMode.WriteOnly, bmap.PixelFormat); var ptr = bmapdata.Scan0; Marshal.Copy(_colorPixels, 0, ptr, _colorPixels.Length); bmap.UnlockBits(bmapdata); using (Graphics g = Graphics.FromImage(bmap)) { lock (_skeletons) { foreach (Skeleton skel in _skeletons) { DrawBonesAndJoints(skel, g); } } if (_tripwires) { foreach (var dl in TripWires) { g.DrawLine(TripWirePen, dl.StartPoint, dl.EndPoint); } } } // notify client NewFrame?.Invoke(this, new NewFrameEventArgs(bmap)); // release the image bmap.Dispose(); } } } }
// Window-capture worker using a ring of 10 reusable 32bpp ARGB frame
// contexts (32bpp is required for FFmpeg compatibility downstream). On
// .NET 4.0+ each iteration runs two tasks in parallel: one BitBlts the
// target window into the current capture context (recording capture
// timestamps, frame size and index), while the other delivers the
// previously captured frame to NewFrame (restoring args.Frame first, in
// case a listener replaced it); on NET35 the same work runs sequentially.
// The ring then rotates so the just-captured context becomes next cycle's
// display context. Frame pacing honors frameInterval, errors are unwrapped
// from AggregateException and reported via VideoSourceError (rethrown when
// no handler is attached), and all contexts are disposed on exit before
// PlayingFinished is raised.
// NOTE(review): when the interval wait is satisfied the capture task
// returns early, so the frame delivered that cycle is re-sent unchanged —
// confirm subscribers tolerate duplicate frames.
private void WorkerThread() { int width = region.Width; int height = region.Height; int x = region.Location.X; int y = region.Location.Y; Size size = region.Size; // Create 10 frames (which we will keep overwriting and reusing) Context[] buffer = new Context[10]; for (int i = 0; i < buffer.Length; i++) { // Note: It's important to use 32-bpp ARGB to avoid problems with FFmpeg later //var bmp = new Bitmap(width, height, PixelFormat.Format24bppRgb); var bmp = new Bitmap(width, height, PixelFormat.Format32bppArgb); buffer[i] = new Context { original = bmp, hwndGraphics = Graphics.FromHwnd(windowHandle), imgGraphics = Graphics.FromImage(bmp), args = new NewFrameEventArgs(bmp) }; } // download start time and duration DateTime start; TimeSpan span; int counter = 0; int bufferPos = 0; Context captureContext = buffer[bufferPos]; Context displayContext = null; while (!stopEvent.WaitOne(0, false)) { // set download start time start = DateTime.Now; try { // Start capturing a new frame at the same // time we send the previous one to listeners #if !NET35 Task.WaitAll( #if NET40 Task.Factory.StartNew(() => #else Task.Run(() => #endif { #endif // wait for a while ? 
if (frameInterval > 0) { // get download duration span = DateTime.Now.Subtract(start); // miliseconds to sleep int msec = frameInterval - (int)span.TotalMilliseconds; // if we should sleep, then sleep as long as needed if ((msec > 0) && (stopEvent.WaitOne(msec, false))) return; } // capture the screen var wndHdc = captureContext.hwndGraphics.GetHdc(); var imgHdc = captureContext.imgGraphics.GetHdc(); captureContext.args.CaptureStarted = DateTime.Now; BitBlt(imgHdc, 0, 0, width, height, wndHdc, x, y, (int)CopyPixelOperation.SourceCopy); captureContext.args.FrameSize = size; captureContext.args.CaptureFinished = DateTime.Now; captureContext.hwndGraphics.ReleaseHdc(wndHdc); captureContext.imgGraphics.ReleaseHdc(imgHdc); // increment frames counter captureContext.args.FrameIndex = counter++; framesReceived++; #if !NET35 } ), #if NET40 Task.Factory.StartNew(() => #else Task.Run(() => #endif { #endif // provide new image to clients if (displayContext != null) { // reset whatever listeners had done with the frame displayContext.args.Frame = displayContext.original; NewFrame?.Invoke(this, displayContext.args); } #if !NET35 })); #endif // Update buffer position displayContext = buffer[bufferPos]; bufferPos = (bufferPos + 1) % buffer.Length; captureContext = buffer[bufferPos]; Debug.Assert(displayContext != captureContext); } catch (ThreadAbortException) { break; } catch (Exception exception) { #if !NET35 AggregateException ae = exception as AggregateException; if (ae != null && ae.InnerExceptions.Count == 1) exception = ae.InnerExceptions[0]; #endif // provide information to clients if (VideoSourceError == null) throw; VideoSourceError(this, new VideoSourceErrorEventArgs(exception)); // wait for a while before the next try Thread.Sleep(250); } // need to stop ? if (stopEvent.WaitOne(0, false)) break; } // release resources foreach (var c in buffer) { c.imgGraphics.Dispose(); c.args.Frame.Dispose(); } PlayingFinished?.Invoke(this, ReasonToFinishPlaying.StoppedByUser); }
/// <summary>
/// Wraps the given bitmap in event args and raises NewFrame.
/// </summary>
/// <param name="frame">The frame to publish to subscribers.</param>
private void InvokeNewFrame(Bitmap frame)
{
    NewFrame?.Invoke(this, new NewFrameEventArgs(frame));
}